diff --git a/.chglog/CHANGELOG.tpl.md b/.chglog/CHANGELOG.tpl.md index b1fa7de1f58..b8919554b80 100755 --- a/.chglog/CHANGELOG.tpl.md +++ b/.chglog/CHANGELOG.tpl.md @@ -1,10 +1,13 @@ + + + {{ if .Versions -}} -## Unreleased +# Unreleased {{ if .Unreleased.CommitGroups -}} {{ range .Unreleased.CommitGroups -}} -### {{ .Title }} +## {{ .Title }} {{ range .Commits -}} * {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} @@ -17,7 +20,8 @@ ## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }} {{ range .CommitGroups -}} -### {{ .Title }} + +## {{ .Title }} {{ range .Commits -}} * {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }} @@ -25,14 +29,14 @@ {{ end -}} {{- if .RevertCommits -}} -### Reverts +## Reverts {{ range .RevertCommits -}} * {{ .Revert.Header }} {{ end }} {{ end -}} {{- if .MergeCommits -}} -### Pull Requests +## Pull Requests {{ range .MergeCommits -}} * {{ .Header }} @@ -41,7 +45,7 @@ {{- if .NoteGroups -}} {{ range .NoteGroups -}} -### {{ .Title }} +## {{ .Title }} {{ range .Notes }} {{ .Body }} {{ end }} diff --git a/.chglog/config.yml b/.chglog/config.yml index 3392563d445..21577651526 100755 --- a/.chglog/config.yml +++ b/.chglog/config.yml @@ -32,3 +32,6 @@ options: notes: keywords: - BREAKING CHANGE + # issues: + # prefix: + # - # diff --git a/.github/mergify.yml b/.github/mergify.yml index a623796a514..dc3f1953586 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -22,7 +22,6 @@ # - name: Automatic merge ⬇️ on approval ✔ # conditions: -# - base!=master # - "#approved-reviews-by>=2" # actions: # queue: diff --git a/.github/scripts/constants.js b/.github/scripts/constants.js new file mode 100644 index 00000000000..2c1d6f9ab76 --- /dev/null +++ b/.github/scripts/constants.js @@ -0,0 +1,54 @@ +module.exports = Object.freeze({ + /** @type {string} */ + // Values: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request + "PR_ACTION": process.env.PR_ACTION || "", + + /** @type {string} */ + "PR_AUTHOR": process.env.PR_AUTHOR?.replace(/"/g, '') || "", + + /** @type {string} */ + "PR_BODY": process.env.PR_BODY || "", + + /** @type {string} */ + "PR_TITLE": process.env.PR_TITLE || "", + + /** @type {number} */ + "PR_NUMBER": process.env.PR_NUMBER || 0, + + /** @type {string} */ + "PR_IS_MERGED": process.env.PR_IS_MERGED || "false", + + /** @type {string} */ + "LABEL_BLOCK": "do-not-merge", + + /** @type {string} */ + "LABEL_BLOCK_REASON": "need-issue", + + /** @type {string} */ + "LABEL_PENDING_RELEASE": "pending-release", + + /** @type {string} */ + "HANDLE_MAINTAINERS_TEAM": "@awslabs/aws-lambda-powertools-python", + + /** @type {string[]} */ + "IGNORE_AUTHORS": ["dependabot[bot]", "markdownify[bot]"], + + /** @type {string[]} */ + "AREAS": [ + "tracer", + "metrics", + "utilities", + "logger", + "event_handlers", + "middleware_factory", + "idempotency", + "event_sources", + "feature_flags", + "parameters", + "batch", + "parser", + "validator", + "jmespath_util", + "lambda-layers", + ], +}); diff --git a/.github/scripts/download_pr_artifact.js b/.github/scripts/download_pr_artifact.js new file mode 100644 index 00000000000..274467c1f1c --- /dev/null +++ b/.github/scripts/download_pr_artifact.js @@ -0,0 +1,26 @@ +module.exports = async ({github, context, core}) => { + const fs = require('fs'); + + const workflowRunId = process.env.WORKFLOW_ID; + core.info(`Listing artifacts for workflow run ${workflowRunId}`); + + const artifacts = await 
github.rest.actions.listWorkflowRunArtifacts({
+    owner: context.repo.owner,
+    repo: context.repo.repo,
+    run_id: workflowRunId,
+  });
+
+  const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0];
+
+  core.info(`Downloading artifacts for workflow run ${workflowRunId}`);
+  const artifact = await github.rest.actions.downloadArtifact({
+    owner: context.repo.owner,
+    repo: context.repo.repo,
+    artifact_id: matchArtifact.id,
+    archive_format: 'zip',
+  });
+
+  core.info("Saving artifact found", artifact);
+
+  fs.writeFileSync('pr.zip', Buffer.from(artifact.data));
+}
diff --git a/.github/scripts/label_missing_related_issue.js b/.github/scripts/label_missing_related_issue.js
new file mode 100644
index 00000000000..705e414c47f
--- /dev/null
+++ b/.github/scripts/label_missing_related_issue.js
@@ -0,0 +1,40 @@
+const {
+  PR_ACTION,
+  PR_AUTHOR,
+  PR_BODY,
+  PR_NUMBER,
+  IGNORE_AUTHORS,
+  LABEL_BLOCK,
+  LABEL_BLOCK_REASON
+} = require("./constants")
+
+module.exports = async ({github, context, core}) => {
+  if (IGNORE_AUTHORS.includes(PR_AUTHOR)) {
+    return core.notice("Author in IGNORE_AUTHORS list; skipping...")
+  }
+
+  if (PR_ACTION != "opened") {
+    return core.notice("Only newly open PRs are labelled to avoid spam; skipping")
+  }
+
+  const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+  const isMatch = RELATED_ISSUE_REGEX.exec(PR_BODY);
+  if (isMatch == null) {
+    core.info(`No related issue found; the author may not have used the template even though an issue exists.`)
+
+    let msg = "No related issues found. Please ensure there is an open issue related to this change to avoid significant delays or closure.";
+    await github.rest.issues.createComment({
+      owner: context.repo.owner,
+      repo: context.repo.repo,
+      body: msg,
+      issue_number: PR_NUMBER,
+    });
+
+    return await github.rest.issues.addLabels({
+      issue_number: PR_NUMBER,
+      owner: context.repo.owner,
+      repo: context.repo.repo,
+      labels: [LABEL_BLOCK, LABEL_BLOCK_REASON]
+    })
+  }
+}
diff --git a/.github/scripts/label_pr_based_on_title.js b/.github/scripts/label_pr_based_on_title.js
new file mode 100644
index 00000000000..4ae3c1ff321
--- /dev/null
+++ b/.github/scripts/label_pr_based_on_title.js
@@ -0,0 +1,60 @@
+const { PR_NUMBER, PR_TITLE, AREAS } = require("./constants")
+
+module.exports = async ({github, context, core}) => {
+  const FEAT_REGEX = /feat(\((.+)\))?(:.+)/
+  const BUG_REGEX = /(fix|bug)(\((.+)\))?(:.+)/
+  const DOCS_REGEX = /(docs|doc)(\((.+)\))?(:.+)/
+  const CHORE_REGEX = /(chore)(\((.+)\))?(:.+)/
+  const DEPRECATED_REGEX = /(deprecated)(\((.+)\))?(:.+)/
+  const REFACTOR_REGEX = /(refactor)(\((.+)\))?(:.+)/
+
+  const labels = {
+    "feature": FEAT_REGEX,
+    "bug": BUG_REGEX,
+    "documentation": DOCS_REGEX,
+    "internal": CHORE_REGEX,
+    "enhancement": REFACTOR_REGEX,
+    "deprecated": DEPRECATED_REGEX,
+  }
+
+  // Maintenance: We should keep track of modified PRs in case their titles change
+  let miss = 0;
+  try {
+    for (const label in labels) {
+      const matcher = new RegExp(labels[label])
+      const matches = matcher.exec(PR_TITLE)
+      if (matches != null) {
+        core.info(`Auto-labeling PR ${PR_NUMBER} with ${label}`)
+
+        await github.rest.issues.addLabels({
+          issue_number: PR_NUMBER,
+          owner: context.repo.owner,
+          repo: context.repo.repo,
+          labels: [label]
+        })
+
+        const area = matches[2]; // second capture group contains the area
+        if (AREAS.indexOf(area) > -1) {
+          core.info(`Auto-labeling PR ${PR_NUMBER} with area ${area}`);
+          await github.rest.issues.addLabels({
+            issue_number: PR_NUMBER,
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            labels: [`area/${area}`],
+          });
+        } else {
+          core.debug(`'${PR_TITLE}' didn't match any known area.`);
+        }
+
+        return;
+      } else {
+        core.debug(`'${PR_TITLE}' didn't match '${label}' semantic.`)
+        miss += 1
+      }
+    }
+  } finally {
+    if (miss == Object.keys(labels).length) {
+      core.notice(`PR ${PR_NUMBER} title '${PR_TITLE}' doesn't follow semantic titles; skipping...`)
+    }
+  }
+}
diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js
index 1953412ae41..e01868d36dc 100644
--- a/.github/scripts/label_related_issue.js
+++ b/.github/scripts/label_related_issue.js
@@ -1,30 +1,53 @@
-module.exports = async ({github, context}) => {
-  const prBody = context.payload.body;
-  const prNumber = context.payload.number;
-  const releaseLabel = process.env.RELEASE_LABEL;
-  const maintainersTeam = process.env.MAINTAINERS_TEAM
+const {
+  PR_AUTHOR,
+  PR_BODY,
+  PR_NUMBER,
+  IGNORE_AUTHORS,
+  LABEL_PENDING_RELEASE,
+  HANDLE_MAINTAINERS_TEAM,
+  PR_IS_MERGED,
+} = require("./constants")
 
-  const RELATED_ISSUE_REGEX = /Issue number:.+(\d)/
+module.exports = async ({github, context, core}) => {
+  if (IGNORE_AUTHORS.includes(PR_AUTHOR)) {
+    return core.notice("Author in IGNORE_AUTHORS list; skipping...")
+  }
+
+  if (PR_IS_MERGED == "false") {
+    return core.notice("Only merged PRs to avoid spam; skipping")
+  }
+
+  const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
 
-  const matcher = new RegExp(RELATED_ISSUE_REGEX)
-  const isMatch = matcher.exec(prBody)
-  if (isMatch != null) {
-    let relatedIssueNumber = isMatch[1]
-    console.info(`Auto-labeling related issue ${relatedIssueNumber} for release`)
+  const isMatch = RELATED_ISSUE_REGEX.exec(PR_BODY);
 
-    return await github.rest.issues.addLabels({
-      issue_number: relatedIssueNumber,
+  try {
+    if (!isMatch) {
+      core.setFailed(`Unable to find related issue for PR number ${PR_NUMBER}.\n\n Body details: ${PR_BODY}`);
+      return await github.rest.issues.createComment({
       owner: context.repo.owner,
       repo: context.repo.repo,
-      labels: [releaseLabel]
-    })
-  } else {
-    let msg = `${maintainersTeam} No related issues found. Please ensure '${releaseLabel}' label is applied before releasing.`;
-    return await github.rest.issues.createComment({
+        body: `${HANDLE_MAINTAINERS_TEAM} No related issues found. Please ensure '${LABEL_PENDING_RELEASE}' label is applied before releasing.`,
+        issue_number: PR_NUMBER,
+      });
+    }
+  } catch (error) {
+    core.setFailed(`Unable to create comment on PR number ${PR_NUMBER}.\n\n Error details: ${error}`);
+    throw new Error(error);
+  }
+
+  const { groups: {issue} } = isMatch
+
+  try {
+    core.info(`Auto-labeling related issue ${issue} for release`)
+    await github.rest.issues.addLabels({
+      issue_number: issue,
      owner: context.repo.owner,
      repo: context.repo.repo,
-      body: msg,
-      issue_number: prNumber,
-    });
+      labels: [LABEL_PENDING_RELEASE]
+    })
+  } catch (error) {
+    core.setFailed(`Is this issue number (${issue}) valid? 
Perhaps a discussion?`); + throw new Error(error); } } diff --git a/.github/workflows/post_release.js b/.github/scripts/post_release.js similarity index 94% rename from .github/workflows/post_release.js rename to .github/scripts/post_release.js index 70474740e2e..d6a598f1960 100644 --- a/.github/workflows/post_release.js +++ b/.github/scripts/post_release.js @@ -100,13 +100,13 @@ const notifyRelease = async ({ // context: https://github.com/actions/toolkit/blob/main/packages/github/src/context.ts module.exports = async ({ github, context }) => { - const { RELEASE_TAG_VERSION } = process.env; - console.log(`Running post-release script for ${RELEASE_TAG_VERSION} version`); + const { RELEASE_VERSION } = process.env; + console.log(`Running post-release script for ${RELEASE_VERSION} version`); await notifyRelease({ gh_client: github, owner: context.repo.owner, repository: context.repo.repo, - release_version: RELEASE_TAG_VERSION, + release_version: RELEASE_VERSION, }); }; diff --git a/.github/scripts/save_pr_details.js b/.github/scripts/save_pr_details.js new file mode 100644 index 00000000000..83bd3bf70d4 --- /dev/null +++ b/.github/scripts/save_pr_details.js @@ -0,0 +1,13 @@ +module.exports = async ({context, core}) => { + const fs = require('fs'); + const filename = "pr.txt"; + + try { + fs.writeFileSync(`./${filename}`, JSON.stringify(context.payload)); + + return `PR successfully saved ${filename}` + } catch (err) { + core.setFailed("Failed to save PR details"); + console.error(err); + } +} diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 7ce6ff8ba29..b63d0331edd 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -18,7 +18,21 @@ jobs: with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for mypy-boto3 stubs Dependabot PRs - if: ${{contains(steps.metadata.outputs.dependency-names, 'mypy-boto3')}} # && steps.metadata.outputs.update-type == 'version-update:semver-patch' + if: ${{ contains(steps.metadata.outputs.dependency-names, 'mypy-boto3') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + # Maintenance: Experiment with literal array (toJSON('["dep1", "dep2"]')) to ease extending it + - name: Enable auto-merge for CDK Construct Lambda Layer Dependabot PRs + if: ${{ contains(steps.metadata.outputs.dependency-names, 'cdk-lambda-powertools-python-layer') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + # Maintenance: Revisit if CDK Constructs make breaking changes like CDK v1 + - name: Enable auto-merge for CDK Lib Construct + if: ${{ contains(steps.metadata.outputs.dependency-names, 'aws-cdk-lib') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} run: gh pr merge --auto --squash "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} diff --git a/.github/workflows/export_pr_details.yml b/.github/workflows/export_pr_details.yml deleted file mode 100644 index af03150c3d5..00000000000 --- a/.github/workflows/export_pr_details.yml +++ /dev/null @@ -1,75 +0,0 @@ -on: - workflow_call: - inputs: - record_pr_workflow_id: - required: true - type: number - secrets: - token: - required: true - # Map the workflow outputs to job outputs - 
outputs: - prNumber: - description: "The first output string" - value: ${{ jobs.export_pr_details.outputs.prNumber }} - prTitle: - description: "The second output string" - value: ${{ jobs.export_pr_details.outputs.prTitle }} - prBody: - description: "The second output string" - value: ${{ jobs.export_pr_details.outputs.prBody }} - prAuthor: - description: "The second output string" - value: ${{ jobs.export_pr_details.outputs.prAuthor }} - prAction: - description: "The second output string" - value: ${{ jobs.export_pr_details.outputs.prAction }} - -name: Export Pull Request details from fork -jobs: - export_pr_details: - runs-on: ubuntu-latest - # Map the job outputs to step outputs - outputs: - prNumber: ${{ steps.prNumber.outputs.prNumber }} - prTitle: ${{ steps.prTitle.outputs.prTitle }} - prBody: ${{ steps.prBody.outputs.prBody }} - prAuthor: ${{ steps.prAuthor.outputs.prAuthor }} - prAction: ${{ steps.prAction.outputs.prAction }} - steps: - - name: "Download artifact" - uses: actions/github-script@v6 - # For security, we only download artifacts tied to the successful PR recording workflow - with: - github-token: ${{ secrets.token }} - script: | - const fs = require('fs'); - - const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{inputs.record_pr_workflow_id}}, - }); - - const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0]; - - const artifact = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - - fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(artifact.data)); - # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead - - run: unzip pr.zip - - id: prNumber - run: echo ::set-output name=prNumber::$(cat ./number) - - id: prTitle - run: echo ::set-output name=prTitle::$(cat ./title) - - id: prBody - run: echo ::set-output name=prBody::$(cat ./body) - - id: prAuthor - run: echo ::set-output name=prAuthor::$(cat ./author) - - id: prAction - run: echo ::set-output name=prAction::$(cat ./action) diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml index 06e3f552cc2..562959bb516 100644 --- a/.github/workflows/label_pr_on_title.yml +++ b/.github/workflows/label_pr_on_title.yml @@ -2,86 +2,36 @@ name: Label PR based on title on: workflow_run: - workflows: ["Record PR number"] + workflows: ["Record PR details"] types: - completed jobs: - upload: - runs-on: ubuntu-latest + get_pr_details: # Guardrails to only ever run if PR recording workflow was indeed # run in a PR event and ran successfully - if: > - ${{ github.event.workflow_run.event == 'pull_request' && - github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + label_pr: + needs: get_pr_details + runs-on: ubuntu-latest steps: - - name: 'Download artifact' - uses: actions/github-script@v6 - # For security, we only download artifacts tied to the successful PR recording workflow - with: - script: | - const fs = require('fs'); - - const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: 
${{github.event.workflow_run.id }}, - }); - - const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0]; - - const artifact = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - - fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(artifact.data)); - # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead - - run: unzip pr.zip - - - name: 'Label PR based on title' + - name: Checkout repository + uses: actions/checkout@v3 + - name: "Label PR based on title" uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_TITLE: ${{ needs.get_pr_details.outputs.prTitle }} with: github-token: ${{ secrets.GITHUB_TOKEN }} # This safely runs in our base repo, not on fork # thus allowing us to provide a write access token to label based on PR title # and label PR based on semantic title accordingly script: | - const fs = require('fs'); - const pr_number = Number(fs.readFileSync('./number')); - const pr_title = fs.readFileSync('./title', 'utf-8').trim(); - - const FEAT_REGEX = /feat(\((.+)\))?(\:.+)/ - const BUG_REGEX = /(fix|bug)(\((.+)\))?(\:.+)/ - const DOCS_REGEX = /(docs|doc)(\((.+)\))?(\:.+)/ - const CHORE_REGEX = /(chore)(\((.+)\))?(\:.+)/ - const DEPRECATED_REGEX = /(deprecated)(\((.+)\))?(\:.+)/ - const REFACTOR_REGEX = /(refactor)(\((.+)\))?(\:.+)/ - - const labels = { - "feature": FEAT_REGEX, - "bug": BUG_REGEX, - "documentation": DOCS_REGEX, - "internal": CHORE_REGEX, - "enhancement": REFACTOR_REGEX, - "deprecated": DEPRECATED_REGEX, - } - - for (const label in labels) { - const matcher = new RegExp(labels[label]) - const isMatch = matcher.exec(pr_title) - if (isMatch != null) { - console.info(`Auto-labeling PR ${pr_number} with ${label}`) - - await github.rest.issues.addLabels({ - issue_number: pr_number, - owner: context.repo.owner, - repo: context.repo.repo, - labels: [label] - }) - - break - } - } + const script = require('.github/scripts/label_pr_based_on_title.js') + await script({github, context, core}) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index 97029740cdb..3f1bcb57237 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -1,25 +1,32 @@ -# Maintenance: Verify why we're having permissions issues even with write scope, then re-enable it. 
-# logs: https://github.com/awslabs/aws-lambda-powertools-python/runs/7030238348?check_suite_focus=true +name: On PR merge on: - pull_request: + workflow_run: + workflows: ["Record PR details"] types: - - closed - -env: - RELEASE_LABEL: "pending-release" - MAINTAINERS_TEAM: "@awslabs/aws-lambda-powertools-python" + - completed jobs: + get_pr_details: + if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} release_label_on_merge: - if: github.event.pull_request.merged == true && github.event.pull_request.user.login != 'dependabot[bot]' + needs: get_pr_details runs-on: ubuntu-latest - permissions: - issues: write # required for new scoped token - pull-requests: write # required for new scoped token + if: needs.get_pr_details.outputs.prIsMerged == 'true' steps: + - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} + PR_IS_MERGED: ${{ needs.get_pr_details.outputs.prIsMerged }} + PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 9a539976467..3d5aab45b5d 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -1,59 +1,33 @@ +name: On new PR + on: workflow_run: - workflows: ["Record PR number"] + workflows: ["Record PR details"] types: - completed -env: - BLOCK_LABEL: "do-not-merge" - BLOCK_REASON_LABEL: "need-issue" - jobs: get_pr_details: - if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' - uses: ./.github/workflows/export_pr_details.yml + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml with: record_pr_workflow_id: ${{ github.event.workflow_run.id }} secrets: token: ${{ secrets.GITHUB_TOKEN }} check_related_issue: needs: get_pr_details - if: > - ${{ needs.get_pr_details.outputs.prAuthor != 'dependabot[bot]' && - needs.get_pr_details.outputs.prAction == 'opened' - }} runs-on: ubuntu-latest steps: + - uses: actions/checkout@v3 - name: "Ensure related issue is present" uses: actions/github-script@v6 + env: + PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_ACTION: ${{ needs.get_pr_details.outputs.prAction }} + PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }} with: github-token: ${{ secrets.GITHUB_TOKEN }} - # Maintenance: convert into a standalone JS like post_release.js script: | - const prBody = "${{ needs.get_pr_details.outputs.prBody }}"; - const prNumber = ${{ needs.get_pr_details.outputs.prNumber }}; - const blockLabel = process.env.BLOCK_LABEL; - const blockReasonLabel = process.env.BLOCK_REASON_LABEL; - - const RELATED_ISSUE_REGEX = /Issue number:.+(\d)/ - - const matcher = new RegExp(RELATED_ISSUE_REGEX) - const isMatch = matcher.exec(prBody) - if (isMatch == null) { - console.info(`No related issue found, maybe the author didn't use the template but there is one.`) - - let msg = "No related issues found. 
Please ensure there is an open issue related to this change to avoid significant delays or closure.";
-            await github.rest.issues.createComment({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              body: msg,
-              issue_number: prNumber,
-            });
-
-            await github.rest.issues.addLabels({
-              issue_number: prNumber,
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              labels: [blockLabel, blockReasonLabel]
-            })
-          }
+          const script = require('.github/scripts/label_missing_related_issue.js')
+          await script({github, context, core})
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 218df1aef35..1473fba2fcb 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -4,45 +4,38 @@ name: Publish to PyPi
 #
 # === Manual activities ===
 #
-# 1. Document human readable changes in CHANGELOG (pre-generate unreleased changes with `make changelog`)
-# 2. Bump package version using poetry version
-# 3. Merge version changes to develop branch
-# 4. Edit the current draft release notes
-# 5. If not already set, use `v<new version>` as a tag, and select develop as target branch
+# 1. Edit the current draft release notes
+# 2. If not already set, use `v<new version>` as a tag, e.g., v1.26.4, and select develop as target branch
 #
 # === Automated activities ===
 #
 # 1. Extract release notes tag that was published
-# 2. Ensure release notes tag match what's in CHANGELOG and pyproject
-# 3. Run tests, linting, security and complexity base line
-# 4. Publish package to PyPi test repository
-# 5. Publish package to PyPi prod repository
-# 6. Kick off Lambda Layer pipeline to publish latest version with minimal dependencies as a SAR App
-# 7. Kick off Lambda Layer pipeline to publish latest version with extra dependencies as a SAR App
-# 8. Builds a fresh version of docs including Changelog updates
-# 9. Push latest release source code to master using release title as the commit message
-# 10. Builds latest documentation for new release, and update latest alias pointing to the new release tag
-# 11. Close and notify all issues labeled "pending-release" about the release details
+# 2. Run tests, linting, security and complexity base line
+# 3. Bump package version and generate latest Changelog
+# 4. Publish package to PyPi test and prod repository
+# 5. Kick off SAR App pipeline to publish latest version with minimal and extra dependencies
+# 6. Builds a new user guide and API docs with release version; update /latest pointing to newly released version
+# 7. Close all issues labeled "pending-release" and notify customers about the release
 
-#
-# === Fallback mechanism due to external failures ===
-#
-# 1. Trigger "Publish to PyPi" workflow manually: https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
-# 2. Use the version released under Releases e.g. v1.13.0
-#
-
-#
-# === Documentation hotfix ===
-#
-# Look for rebuild latest docs workflow
+# See MAINTAINERS.md "Releasing a new version" for release mechanisms
 
 on:
   release:
     types: [published]
+  workflow_dispatch:
+    inputs:
+      version_to_publish:
+        description: "Version to be released in PyPi, Docs, and Lambda Layer, e.g. v1.26.4"
+        default: v1.26.4
+        required: true
 
 jobs:
   release:
     runs-on: ubuntu-latest
+    outputs:
+      RELEASE_VERSION: ${{ steps.release_version.outputs.RELEASE_VERSION }}
+    env:
+      RELEASE_TAG_VERSION: ${{ github.event.release.tag_name || inputs.version_to_publish }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -52,17 +45,30 @@ jobs:
           fetch-depth: 0
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
           python-version: "3.8"
       - name: Set release notes tag
+        id: release_version
+        # transform tag format `v<version>` to `<version>`
         run: |
-          RELEASE_TAG_VERSION=${{ github.event.release.tag_name }}
-          echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV
-      - name: Ensure new version is also set in pyproject and CHANGELOG
-        run: |
-          grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md
-          grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml
+          RELEASE_VERSION=${RELEASE_TAG_VERSION:1}
+          echo "RELEASE_VERSION=${RELEASE_VERSION}" >> $GITHUB_ENV
+          echo "::set-output name=RELEASE_VERSION::${RELEASE_VERSION}"
       - name: Install dependencies
         run: make dev
       - name: Run all tests, linting and baselines
         run: make pr
+      - name: Bump package version
+        run: poetry version ${RELEASE_VERSION}
+      - name: Generate latest CHANGELOG
+        run: make changelog
+      - name: Setup git client
+        run: |
+          git config user.name "Release bot"
+          git config user.email aws-devax-open-source@amazon.com
+      - name: Push project metadata and changelog to trunk
+        run: |
+          git add CHANGELOG.md
+          git add pyproject.toml
+          git commit -m "chore(ci): update project with version ${RELEASE_VERSION}"
+          git push origin HEAD:refs/heads/develop
       - name: Build python package and wheel
         run: poetry build
       - name: Upload to PyPi test
@@ -77,7 +83,7 @@ jobs:
           PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
       - name: publish lambda layer in SAR by triggering the internal codepipeline
         run: |
-          aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite
+          aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_VERSION --overwrite
           aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }}
         env:
           # Maintenance: Migrate to new OAuth mechanism
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
@@ -85,43 +91,50 @@ jobs:
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           AWS_DEFAULT_REGION: eu-west-1
           AWS_DEFAULT_OUTPUT: json
-      - name: Setup doc deploy
+
+  docs:
+    needs: release
+    runs-on: ubuntu-latest
+    env:
+      RELEASE_VERSION: ${{ needs.release.outputs.RELEASE_VERSION }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Setup git client
         run: |
-          git config --global user.name Docs deploy
-          git config --global user.email aws-devax-open-source@amazon.com
+          git config user.name "Release bot"
+          git config user.email aws-devax-open-source@amazon.com
       - name: Build docs website and API reference
         run: |
-          make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest"
+          make release-docs VERSION=${RELEASE_VERSION} ALIAS="latest"
           poetry run mike set-default --push latest
       - name: Release API docs to release version
-        uses: peaceiris/actions-gh-pages@v3
+        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./api
           keep_files: true
-          destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api
+          destination_dir: ${{ env.RELEASE_VERSION }}/api
       - name: Release API docs to latest
-        uses: peaceiris/actions-gh-pages@v3
+        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./api
           keep_files: true
           destination_dir: latest/api
-      - name: Close issues related to this release
-        uses: actions/github-script@v6
-        with:
-          script: |
-            const post_release = 
require('.github/workflows/post_release.js') - await post_release({github, context, core}) - sync_master: + post_release: needs: release runs-on: ubuntu-latest + env: + RELEASE_VERSION: ${{ needs.release.outputs.RELEASE_VERSION }} steps: - uses: actions/checkout@v3 - - name: Sync master from detached head - # If version matches CHANGELOG and pyproject.toml - # If it passes all checks, successfully releases to test and prod - # Then sync up master with latest source code release - # where commit message will be Release notes title - run: git push origin HEAD:refs/heads/master --force + - name: Close issues related to this release + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const post_release = require('.github/scripts/post_release.js') + await post_release({github, context, core}) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml new file mode 100644 index 00000000000..3fe6afd6098 --- /dev/null +++ b/.github/workflows/publish_layer.yml @@ -0,0 +1,78 @@ +name: Deploy layer to all regions + +permissions: + id-token: write + contents: read + +on: + workflow_dispatch: + inputs: + latest_published_version: + description: "Latest PyPi published version to rebuild latest docs for, e.g. v1.22.0" + default: "v1.22.0" + required: true + workflow_run: + workflows: ["Publish to PyPi"] + types: + - completed + +jobs: + build-layer: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./layer + steps: + - name: checkout + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "16.12" + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + cache: "pip" + - name: Set release notes tag + run: | + RELEASE_INPUT=${{ inputs.latest_published_version }} + GITHUB_EVENT_RELEASE_TAG=${{ github.event.release.tag_name }} + RELEASE_TAG_VERSION=${GITHUB_EVENT_RELEASE_TAG:-$RELEASE_INPUT} + echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV + - name: install cdk and deps + run: | + npm install -g aws-cdk@2.29.0 + cdk --version + - name: install deps + run: | + pip install -r requirements.txt + - name: CDK build + run: cdk synth --context version=$RELEASE_TAG_VERSION -o cdk.out + - name: zip output + run: zip -r cdk.out.zip cdk.out + - name: Archive CDK artifacts + uses: actions/upload-artifact@v3 + with: + name: cdk-layer-artefact + path: layer/cdk.out.zip + + deploy-beta: + needs: + - build-layer + uses: ./.github/workflows/reusable_deploy_layer_stack.yml + with: + stage: "BETA" + artefact-name: "cdk-layer-artefact" + secrets: + target-account-role: arn:aws:iam::${{ secrets.LAYERS_BETA_ACCOUNT }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} + + deploy-prod: + needs: + - deploy-beta + uses: ./.github/workflows/reusable_deploy_layer_stack.yml + with: + stage: "PROD" + artefact-name: "cdk-layer-artefact" + secrets: + target-account-role: arn:aws:iam::${{ secrets.LAYERS_PROD_ACCOUNT }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index c0cdcc85cff..b7599279c43 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -10,7 +10,6 @@ on: - "mypy.ini" branches: - develop - - master push: paths: - "aws_lambda_powertools/**" @@ -20,7 +19,6 @@ on: - "mypy.ini" branches: - develop - - master jobs: build: @@ -51,11 +49,10 @@ jobs: - name: Complexity baseline run: make complexity-baseline - name: Upload coverage to Codecov - 
uses: codecov/codecov-action@v3.1.0 + uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # 3.1.0 with: file: ./coverage.xml # flags: unittests env_vars: OS,PYTHON name: aws-lambda-powertools-python-codecov # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing - token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index 3a6e15e5431..30f88ef976e 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -1,17 +1,23 @@ name: Docs +# Maintenance: Create a reusable workflow to be more easily reused across release, push, and doc hot fixes +# this should include inputs on whether to release API docs, what version to release, and whether to rebuild /latest + on: push: branches: - develop paths: - "docs/**" - - "CHANGELOG.md" - "mkdocs.yml" - "examples/**" jobs: docs: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` + concurrency: + group: on-docs-build runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -23,7 +29,7 @@ jobs: python-version: "3.8" # Maintenance: temporarily until we drop Python 3.6 and make cfn-lint a dev dependency - name: Setup Cloud Formation Linter with Latest Version - uses: scottbrenner/cfn-lint-action@v2 + uses: scottbrenner/cfn-lint-action@ee9ee62016ef62c5fd366e6be920df4b310ed353 # v2.2.4 - name: Install dependencies run: make dev - name: Lint documentation @@ -37,7 +43,7 @@ jobs: - name: Build docs website and API reference run: make release-docs VERSION="develop" ALIAS="stage" - name: Deploy all docs - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./api diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index b185556f2ff..48399bbae7f 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -7,56 +7,58 @@ name: Rebuild latest docs # 2. Use the latest version released under Releases e.g. v1.22.0 # 3. Set `Build and publish docs only` field to `true` - on: workflow_dispatch: inputs: latest_published_version: - description: 'Latest PyPi published version to rebuild latest docs for, e.g. v1.22.0' - default: 'v1.22.0' + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v1.22.0" + default: "v1.22.0" required: true - jobs: release: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` + concurrency: + group: on-docs-rebuild runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - - name: Set release notes tag - run: | - RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} - echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - - name: Ensure new version is also set in pyproject and CHANGELOG - run: | - grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md - grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml - - name: Install dependencies - run: make dev - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email aws-devax-open-source@amazon.com - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + - name: Set release notes tag + run: | + RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} + echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV + - name: Ensure new version is also set in pyproject and CHANGELOG + run: | + grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md + grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml + - name: Install dependencies + run: make dev + - name: Setup doc deploy + run: | + git config --global user.name Docs deploy + git config --global user.email aws-devax-open-source@amazon.com + - name: Build docs website and API reference + run: | + make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" + poetry run mike set-default --push latest + - name: Release API docs to release version + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api + - name: Release API docs to latest + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index fcee8876286..44f445a70ac 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -1,24 +1,22 @@ -name: Record PR number +name: Record PR details on: pull_request: - types: [opened, edited] + types: [opened, edited, closed] jobs: - build: + record_pr: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Save PR number - run: | - mkdir -p ./pr - echo ${{ github.event.number }} > ./pr/number - 
echo "${{ github.event.pull_request.title }}" > ./pr/title - echo "${{ github.event.pull_request.body }}" > ./pr/body - echo "${{ github.event.pull_request.user.login }}" > ./pr/author - echo "${{ github.event.action }}" > ./pr/action + - name: "Extract PR details" + uses: actions/github-script@v6 + with: + script: | + const script = require('.github/scripts/save_pr_details.js') + await script({github, context, core}) - uses: actions/upload-artifact@v3 with: name: pr - path: pr/ + path: pr.txt diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index ecc2251ec43..54d8c5ea723 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -2,7 +2,6 @@ name: Release Drafter on: push: - # branches to consider in the event; optional, defaults to all branches: - develop workflow_dispatch: @@ -11,6 +10,6 @@ jobs: update_release_draft: runs-on: ubuntu-latest steps: - - uses: release-drafter/release-drafter@v5 + - uses: release-drafter/release-drafter@ac463ffd9cc4c6ad5682af93dc3e3591c4657ee3 # v5.20.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml new file mode 100644 index 00000000000..74d2dc19767 --- /dev/null +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -0,0 +1,85 @@ +name: Deploy cdk stack + +permissions: + id-token: write + contents: read + +on: + workflow_call: + inputs: + stage: + required: true + type: string + artefact-name: + required: true + type: string + secrets: + target-account-role: + required: true + +jobs: + deploy-cdk-stack: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./layer + strategy: + fail-fast: false + matrix: + region: ["af-south-1"] + # "eu-central-1", + # "us-east-1", + # "us-east-2", + # "us-west-1", + # "us-west-2", + # "ap-east-1", + # "ap-south-1", + # "ap-northeast-1", + # "ap-northeast-2", + # "ap-southeast-1", + # "ap-southeast-2", + # "ca-central-1", + # "eu-west-1", + # "eu-west-2", + # "eu-west-3", + # "eu-south-1", + # "eu-north-1", + # "sa-east-1", + # "ap-southeast-3", + # "ap-northeast-3", + # "me-south-1" + steps: + - name: checkout + uses: actions/checkout@v3 + - name: aws credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-region: ${{ matrix.region }} + role-to-assume: ${{ secrets.target-account-role }} + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "16.12" + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + cache: "pip" + - name: install cdk and deps + run: | + npm install -g aws-cdk@2.29.0 + cdk --version + - name: install deps + run: | + pip install -r requirements.txt + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.artefact-name }} + path: layer + - name: unzip artefact + run: unzip cdk.out.zip + - name: CDK Deploy Layer + run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack' --require-approval never --verbose + - name: CDK Deploy Canary + run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ inputs.stage }}" 'CanaryStack' --require-approval never --verbose diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml new file mode 100644 index 00000000000..dcbb959a4ea --- /dev/null +++ b/.github/workflows/reusable_export_pr_details.yml @@ -0,0 +1,81 @@ +name: Export previously 
recorded PR + +on: + workflow_call: + inputs: + record_pr_workflow_id: + required: true + type: number + secrets: + token: + required: true + # Map the workflow outputs to job outputs + outputs: + prNumber: + description: "PR Number" + value: ${{ jobs.export_pr_details.outputs.prNumber }} + prTitle: + description: "PR Title" + value: ${{ jobs.export_pr_details.outputs.prTitle }} + prBody: + description: "PR Body as string" + value: ${{ jobs.export_pr_details.outputs.prBody }} + prAuthor: + description: "PR author username" + value: ${{ jobs.export_pr_details.outputs.prAuthor }} + prAction: + description: "PR event action" + value: ${{ jobs.export_pr_details.outputs.prAction }} + prIsMerged: + description: "Whether PR is merged" + value: ${{ jobs.export_pr_details.outputs.prIsMerged }} + +jobs: + export_pr_details: + runs-on: ubuntu-latest + env: + FILENAME: pr.txt + # Map the job outputs to step outputs + outputs: + prNumber: ${{ steps.prNumber.outputs.prNumber }} + prTitle: ${{ steps.prTitle.outputs.prTitle }} + prBody: ${{ steps.prBody.outputs.prBody }} + prAuthor: ${{ steps.prAuthor.outputs.prAuthor }} + prAction: ${{ steps.prAction.outputs.prAction }} + prIsMerged: ${{ steps.prIsMerged.outputs.prIsMerged }} + steps: + - name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found + uses: actions/checkout@v3 + - name: "Download previously saved PR" + uses: actions/github-script@v6 + env: + WORKFLOW_ID: ${{ inputs.record_pr_workflow_id }} + # For security, we only download artifacts tied to the successful PR recording workflow + with: + github-token: ${{ secrets.token }} + script: | + const script = require('.github/scripts/download_pr_artifact.js') + await script({github, context, core}) + # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead + - name: "Unzip PR artifact" + run: unzip pr.zip + # NOTE: We need separate steps for each mapped output and respective IDs + # otherwise the parent caller won't see them regardless on how outputs are set. + - name: "Export Pull Request Number" + id: prNumber + run: echo ::set-output name=prNumber::$(jq -c '.number' ${FILENAME}) + - name: "Export Pull Request Title" + id: prTitle + run: echo ::set-output name=prTitle::$(jq -c '.pull_request.title' ${FILENAME}) + - name: "Export Pull Request Body" + id: prBody + run: echo ::set-output name=prBody::$(jq -c '.pull_request.body' ${FILENAME}) + - name: "Export Pull Request Author" + id: prAuthor + run: echo ::set-output name=prAuthor::$(jq -c '.pull_request.user.login' ${FILENAME}) + - name: "Export Pull Request Action" + id: prAction + run: echo ::set-output name=prAction::$(jq -c '.action' ${FILENAME}) + - name: "Export Pull Request Merged status" + id: prIsMerged + run: echo ::set-output name=prIsMerged::$(jq -c '.pull_request.merged' ${FILENAME}) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml new file mode 100644 index 00000000000..2e186037853 --- /dev/null +++ b/.github/workflows/run-e2e-tests.yml @@ -0,0 +1,32 @@ +name: run-e2e-tests +on: + workflow_dispatch: +env: + AWS_DEFAULT_REGION: us-east-1 + E2E_TESTS_PATH: tests/e2e/ +jobs: + run: + runs-on: ubuntu-latest + permissions: + id-token: write # needed to request JWT with GitHub's OIDC Token endpoint. 
docs: https://bit.ly/3MNgQO9 + contents: read + strategy: + matrix: + version: ["3.7", "3.8", "3.9"] + steps: + - name: "Checkout" + uses: actions/checkout@v3 + - name: "Use Python" + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.version }} + architecture: "x64" + - name: Install dependencies + run: make dev + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.AWS_TEST_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Test + run: make e2e-test diff --git a/.github/workflows/secure_workflows.yml b/.github/workflows/secure_workflows.yml new file mode 100644 index 00000000000..bbe274806ea --- /dev/null +++ b/.github/workflows/secure_workflows.yml @@ -0,0 +1,32 @@ +name: Lockdown untrusted workflows + +on: + push: + paths: + - ".github/workflows/**" + pull_request: + paths: + - ".github/workflows/**" + +jobs: + enforce_pinned_workflows: + name: Harden Security + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Ensure 3rd party workflows have SHA pinned + uses: zgosalvez/github-actions-ensure-sha-pinned-actions@6ca5574367befbc9efdb2fa25978084159c5902d # v1.3.0 + with: + # Trusted GitHub Actions and/or organizations + allowlist: | + aws-actions/ + actions/checkout + actions/github-script + actions/setup-node + actions/setup-python + actions/upload-artifact + actions/download-artifact + github/codeql-action/init + github/codeql-action/analyze + dependabot/fetch-metadata diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8a614f78968..6a41e0d945c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,6 @@ repos: hooks: - id: check-merge-conflict - id: trailing-whitespace - - id: end-of-file-fixer - id: check-toml - repo: local hooks: diff --git a/MAINTAINERS.md b/MAINTAINERS.md index a706b94c6ab..4ccf87f7b42 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,4 +1,4 @@ - + ## Table of contents - [Overview](#overview) @@ -6,29 +6,28 @@ - [Emeritus](#emeritus) - [Labels](#labels) - [Maintainer Responsibilities](#maintainer-responsibilities) - - [Uphold Code of Conduct](#uphold-code-of-conduct) - - [Prioritize Security](#prioritize-security) - - [Review Pull Requests](#review-pull-requests) - - [Triage New Issues](#triage-new-issues) - - [Triage Bug Reports](#triage-bug-reports) - - [Triage RFCs](#triage-rfcs) - - [Releasing a new version](#releasing-a-new-version) - - [Changelog generation](#changelog-generation) - - [Bumping the version](#bumping-the-version) - - [Drafting release notes](#drafting-release-notes) - - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - - [Manage Roadmap](#manage-roadmap) - - [Add Continuous Integration Checks](#add-continuous-integration-checks) - - [Negative Impact on the Project](#negative-impact-on-the-project) - - [Becoming a maintainer](#becoming-a-maintainer) + - [Uphold Code of Conduct](#uphold-code-of-conduct) + - [Prioritize Security](#prioritize-security) + - [Review Pull Requests](#review-pull-requests) + - [Triage New Issues](#triage-new-issues) + - [Triage Bug Reports](#triage-bug-reports) + - [Triage RFCs](#triage-rfcs) + - [Releasing a new version](#releasing-a-new-version) + - [Drafting release notes](#drafting-release-notes) + - [Run end to end tests](#run-end-to-end-tests) + - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) + - [Maintain 
Overall Health of the Repo](#maintain-overall-health-of-the-repo) + - [Manage Roadmap](#manage-roadmap) + - [Add Continuous Integration Checks](#add-continuous-integration-checks) + - [Negative Impact on the Project](#negative-impact-on-the-project) + - [Becoming a maintainer](#becoming-a-maintainer) - [Common scenarios](#common-scenarios) - - [Contribution is stuck](#contribution-is-stuck) - - [Insufficient feedback or information](#insufficient-feedback-or-information) - - [Crediting contributions](#crediting-contributions) - - [Is that a bug?](#is-that-a-bug) - - [Mentoring contributions](#mentoring-contributions) - - [Long running issues or PRs](#long-running-issues-or-prs) + - [Contribution is stuck](#contribution-is-stuck) + - [Insufficient feedback or information](#insufficient-feedback-or-information) + - [Crediting contributions](#crediting-contributions) + - [Is that a bug?](#is-that-a-bug) + - [Mentoring contributions](#mentoring-contributions) + - [Long running issues or PRs](#long-running-issues-or-prs) ## Overview @@ -52,7 +51,7 @@ Previous active maintainers who contributed to this project. | Maintainer | GitHub ID | Affiliation | | ----------------- | ----------------------------------------------- | ----------- | | Tom McCarthy | [cakepietoast](https://github.com/cakepietoast) | MongoDB | -| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen) | Amazon | +| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen) | Apollo | ## Labels @@ -100,7 +99,9 @@ Be aware of recurring ambiguous situations and [document them](#common-scenarios ### Uphold Code of Conduct -Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](CODE_OF_CONDUCT.md). These might be nuanced and should be handled with extra care - when in doubt, do not engage and reach out to other maintainers and admins. +Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](CODE_OF_CONDUCT.md). + +These might be nuanced and should be handled with extra care - when in doubt, do not engage and reach out to other maintainers and admins. ### Prioritize Security @@ -152,14 +153,14 @@ RFC is a collaborative process to help us get to the most optimal solution given Make sure you ask these questions in mind when reviewing: -* Does it use our [RFC template](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE)? -* Does the match our [Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets)? -* Does the proposal address the use case? If so, is the recommended usage explicit? -* Does it focus on the mechanics to solve the use case over fine-grained implementation details? -* Can anyone familiar with the code base implement it? -* If approved, are they interested in contributing? Do they need any guidance? -* Does this significantly increase the overall project maintenance? Do we have the skills to maintain it? -* If we can't take this use case, are there alternative projects we could recommend? Or does it call for a new project altogether? 
+- Does it use our [RFC template](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE)? +- Does the match our [Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets)? +- Does the proposal address the use case? If so, is the recommended usage explicit? +- Does it focus on the mechanics to solve the use case over fine-grained implementation details? +- Can anyone familiar with the code base implement it? +- If approved, are they interested in contributing? Do they need any guidance? +- Does this significantly increase the overall project maintenance? Do we have the skills to maintain it? +- If we can't take this use case, are there alternative projects we could recommend? Or does it call for a new project altogether? When necessary, be upfront that the time to review, approve, and implement a RFC can vary - see [Contribution is stuck](#contribution-is-stuck). Some RFCs may be further updated after implementation, as certain areas become clearer. @@ -167,23 +168,15 @@ Some examples using our initial and new RFC templates: #92, #94, #95, #991, #122 ### Releasing a new version -> TODO: This is an area we want to increase automation while keeping communication at human level. - -Firstly, make sure you are using the `develop` branch and it is up to date with the origin. - -There are three main steps to release a new version: Changelog generation, version bumping, and drafting release notes. +Firstly, make sure the commit history in the `develop` branch **(1)** it's up to date, **(2)** commit messages are semantic, and **(3)** commit messages have their respective area, for example `feat(logger): `, `chore(ci): ...`). -#### Changelog generation +**Found typos or unclear commit messages?** -You can pre-generate a temporary CHANGELOG using `make changelog`. This will generate a `TMP_CHANGELOG.md` with all staged changes under the `unreleased` section. +Reword through rebase and push with `--force-with-lease` once you're confident. This will ensure [CHANGELOG](./CHANGELOG.md) is always clear for customers looking to understand what changed in between releases - was that a bug? what new features and for which utility? -Each unreleased line item is a commit. You can adjust them if you find the commit titles are insufficient to describe their intent. Once you're comfortable, bring these changes to the `CHANGELOG.md` with a new version heading like in previous versions. +**Looks good, what's next?** -#### Bumping the version - -Use `poetry version ` to bump the version. For example, you can use `poetry version minor` when releasing a minor version. - -NOTE. Make sure both `CHANGELOG` and `pyproject.toml` are committed and pushed to the remote `develop` branch before proceeding. +The only step is to draft and publish a good release notes, everything else is automated. #### Drafting release notes @@ -193,21 +186,35 @@ Make sure the `tag` field reflects the new version you're releasing, the target You'll notice we group all changes based on their [labels](#labels) like `feature`, `bug`, `documentation`, etc. -> **Q: What if there's an incorrect title or grouping?** +**I spotted a typo or incorrect grouping - how do I fix it?** Edit the respective PR title and update their [labels](#labels). Then run the [Release Drafter workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/release-drafter.yml) to update the Draft release. -The best part comes now. 
Replace the placeholder `[Human readable summary of changes]` with what you'd like to communicate to customers what this release is all about. Always put yourself in the customers shoes. For that, these are some questions to keep in mind when drafting your first or future release notes: +**All looking good, what's next?** + +The best part comes now. Replace the placeholder `[Human readable summary of changes]` with what you'd like to communicate to customers about this release. Rule of thumb: always put yourself in the customers' shoes. -* Can customers understand at a high level what changed in this release? -* Is there a link to the documentation where they can read more about each main change? -* Are there any graphics or code snippets that can enhance readability? -* Are we calling out any key contributor(s) to this release? - - All contributors are automatically credited, use this as an exceptional case to feature them +These are some questions to keep in mind when drafting your first or future release notes: -Once you're happy, hit `Publish release`. This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be notified. +- Can customers understand at a high level what changed in this release? +- Is there a link to the documentation where they can read more about each main change? +- Are there any graphics or [code snippets](https://carbon.now.sh/) that can enhance readability? +- Are we calling out any key contributor(s) to this release? + - All contributors are automatically credited; use this as an exceptional case to feature them -> TODO: Wait for @am29d new Lambda Layers pipeline work to complete, then add how Lambda Layers are published +Once you're happy, hit `Publish release` 🎉🎉🎉. + +This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version on PyPI, and all issues labeled as `pending-release` will be closed and notified. + +> TODO: Include information to verify SAR and Lambda Layers deployment; we're still finalizing Lambda Layer automated deployment in GitHub Actions - ping @am29d when in doubt. + +### Run end to end tests + +In order to run end-to-end tests, you need to install the CDK CLI first and bootstrap your account with the `cdk bootstrap` command. For additional details, follow the [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html). + +To run locally, export the `AWS_PROFILE` environment variable and run `make e2e-test`. To run from GitHub Actions, use [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against. + +**NOTE**: E2E tests are run as part of each merge to the `develop` branch. ### Releasing a documentation hotfix @@ -217,7 +224,7 @@ This workflow will update both user guide and API documentation. ### Maintain Overall Health of the Repo -> TODO: Coordinate removing `master` and renaming `develop` to `main` +> TODO: Coordinate renaming `develop` to `main` Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches.
@@ -240,6 +247,7 @@ Actions that negatively impact the project will be handled by the admins, in coo In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). + ## Common scenarios These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) diff --git a/Makefile b/Makefile index 6173e3e310d..a024f340263 100644 --- a/Makefile +++ b/Makefile @@ -23,14 +23,17 @@ lint-docs-fix: docker run -v ${PWD}:/markdown 06kellyjac/markdownlint-cli --fix "docs" test: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance unit-test: poetry run pytest tests/unit +e2e-test: + poetry run pytest -rP -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e + coverage-html: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=html + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=html pre-commit: pre-commit run --show-diff-on-failure @@ -91,8 +94,9 @@ release: pr changelog: git fetch --tags origin - @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" - docker run -v "${PWD}":/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. 
> TMP_CHANGELOG.md + CURRENT_VERSION=$(shell git describe --abbrev=0 --tag) ;\ + echo "[+] Pre-generating CHANGELOG for tag: $$CURRENT_VERSION" ;\ + docker run -v "${PWD}":/workdir quay.io/git-chglog/git-chglog > CHANGELOG.md mypy: poetry run mypy --pretty aws_lambda_powertools diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index becfc9de85c..1f01015051c 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -1,3 +1,4 @@ +import inspect import json import logging import os @@ -286,3 +287,7 @@ def _strip_none_records(records: Dict[str, Any]) -> Dict[str, Any]: JsonFormatter = LambdaPowertoolsFormatter # alias to previous formatter + + +# Fetch current and future parameters from PowertoolsFormatter that should be reserved +RESERVED_FORMATTER_CUSTOM_KEYS: List[str] = inspect.getfullargspec(LambdaPowertoolsFormatter).args[1:] diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 157d53adf7e..f70224cabae 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -12,7 +12,7 @@ from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice from .exceptions import InvalidLoggerSamplingRateError from .filters import SuppressFilter -from .formatter import BasePowertoolsFormatter, LambdaPowertoolsFormatter +from .formatter import RESERVED_FORMATTER_CUSTOM_KEYS, BasePowertoolsFormatter, LambdaPowertoolsFormatter from .lambda_context import build_lambda_context_model logger = logging.getLogger(__name__) @@ -82,7 +82,7 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] datefmt: str, optional String directives (strftime) to format log timestamp using `time`, by default it uses RFC 3339. - use_datetime_directive: str, optional + use_datetime_directive: bool, optional Interpret `datefmt` as a format string for `datetime.datetime.strftime`, rather than `time.strftime`. 
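For context on the docstring change above: with `use_datetime_directive` enabled, `datefmt` is handed to `datetime.datetime.strftime` instead of `time.strftime`, which unlocks directives like `%f` (microseconds). A minimal sketch of the documented behavior follows; the service name and format string are illustrative, not part of this change.

```python
from aws_lambda_powertools import Logger

# datefmt is interpreted by datetime.strftime, so %f (microseconds) works here
logger = Logger(
    service="payment",  # illustrative service name
    use_datetime_directive=True,
    datefmt="%Y-%m-%dT%H:%M:%S.%f%z",
)

logger.info("timestamps now honour datetime strftime directives")
```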
@@ -368,7 +368,7 @@ def registered_handler(self) -> logging.Handler: return handlers[0] @property - def registered_formatter(self) -> PowertoolsFormatter: + def registered_formatter(self) -> BasePowertoolsFormatter: """Convenience property to access logger formatter""" return self.registered_handler.formatter # type: ignore @@ -395,7 +395,15 @@ def structure_logs(self, append: bool = False, **keys): is_logger_preconfigured = getattr(self._logger, "init", False) if not is_logger_preconfigured: formatter = self.logger_formatter or LambdaPowertoolsFormatter(**log_keys) # type: ignore - return self.registered_handler.setFormatter(formatter) + self.registered_handler.setFormatter(formatter) + + # when using a custom Lambda Powertools Formatter + # standard and custom keys that are not Powertools Formatter parameters should be appended + # and custom keys that might happen to be Powertools Formatter parameters should be discarded + # this prevents adding them as custom keys, for example, `json_default=` + # see https://github.com/awslabs/aws-lambda-powertools-python/issues/1263 + custom_keys = {k: v for k, v in log_keys.items() if k not in RESERVED_FORMATTER_CUSTOM_KEYS} + return self.registered_formatter.append_keys(**custom_keys) # Mode 2 (legacy) if append: diff --git a/aws_lambda_powertools/utilities/parser/models/sns.py b/aws_lambda_powertools/utilities/parser/models/sns.py index e329162e5c8..1b095fde2c4 100644 --- a/aws_lambda_powertools/utilities/parser/models/sns.py +++ b/aws_lambda_powertools/utilities/parser/models/sns.py @@ -31,8 +31,11 @@ class SnsNotificationModel(BaseModel): def check_sqs_protocol(cls, values): sqs_rewritten_keys = ("UnsubscribeURL", "SigningCertURL") if any(key in sqs_rewritten_keys for key in values): - values["UnsubscribeUrl"] = values.pop("UnsubscribeURL") - values["SigningCertUrl"] = values.pop("SigningCertURL") + # The sentinel value 'None' forces the validator to fail with + # ValidationError instead of KeyError when the key is missing from + # the SQS payload + values["UnsubscribeUrl"] = values.pop("UnsubscribeURL", None) + values["SigningCertUrl"] = values.pop("SigningCertURL", None) return values diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index cf99b615a80..6943d6ed9bb 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -8,14 +8,14 @@ Event handler for Amazon API Gateway REST and HTTP APIs, and Application Load Balancer (ALB). ## Key Features * Lightweight routing to reduce boilerplate for API Gateway REST/HTTP API and ALB -* Seamless support for CORS, binary and Gzip compression -* Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to easily access event and identity information -* Built-in support for Decimals JSON encoding -* Support for dynamic path expressions -* Router to allow for splitting up the handler across multiple files +* Support for CORS, binary and Gzip compression, Decimals JSON encoding, and the ability to bring your own JSON serializer +* Built-in integration with [Event Source Data Classes utilities](../../utilities/data_classes.md){target="_blank"} for self-documented event schema ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}.
+ ### Required resources You must have an existing [API Gateway Proxy integration](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html){target="_blank"} or [ALB](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html){target="_blank"} configured to invoke your Lambda function. @@ -25,54 +25,14 @@ This is the sample infrastructure for API Gateway we are using for the examples ???+ info "There are no additional permissions or dependencies required to use this utility." ```yaml title="AWS Serverless Application Model (SAM) example" -AWSTemplateFormatVersion: "2010-09-09" -Transform: AWS::Serverless-2016-10-31 -Description: Hello world event handler API Gateway - -Globals: - Api: - TracingEnabled: true - Cors: # see CORS section - AllowOrigin: "'https://example.com'" - AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" - MaxAge: "'300'" - BinaryMediaTypes: # see Binary responses section - - "*~1*" # converts to */* for any binary type - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: my_api-service - -Resources: - ApiFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: api_handler/ - Description: API handler function - Events: - ApiEvent: - Type: Api - Properties: - # NOTE: this is a catch-all rule to simply the documentation. - # explicit routes and methods are recommended for prod instead - # for example, Path: /hello, Method: GET - Path: /{proxy+} # Send requests on any path to the lambda function - Method: ANY # Send requests using any http method to the lambda function +--8<-- "examples/event_handler_rest/sam/template.yaml" ``` ### Event Resolvers Before you decorate your functions to handle a given path and HTTP method(s), you need to initialize a resolver. -A resolver will handle request resolution, include [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. +A resolver will handle request resolution, including [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, and `ALBResolver`. @@ -83,113 +43,29 @@ For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, a When using Amazon API Gateway REST API to front your Lambda functions, you can use `APIGatewayRestResolver`. -Here's an example on how we can handle the `/hello` path. +Here's an example of how we can handle the `/todos` path. ???+ info We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`.
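The full example referenced below lives under `examples/event_handler_rest/src/` in the repository. For readers without the examples tree at hand, a minimal sketch of the same pattern follows; the route and response payload are illustrative.

```python
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.logging import correlation_paths

tracer = Tracer()
logger = Logger()
app = APIGatewayRestResolver()

@app.get("/todos")
@tracer.capture_method
def get_todos():
    # Dict responses are serialized to compact JSON automatically
    return {"todos": []}

# You can continue to use other utilities just as before
@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
@tracer.capture_lambda_handler
def lambda_handler(event, context):
    return app.resolve(event, context)
```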
-=== "app.py" - - ```python hl_lines="3 7 9 12 18" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("/hello") - @tracer.capture_method - def get_hello_universe(): - return {"message": "hello universe"} +=== "getting_started_rest_api_resolver.py" - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="5 11 14 28" + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.py" ``` -=== "hello_event.json" + +=== "getting_started_rest_api_resolver.json" This utility uses `path` and `httpMethod` to route to the right function. This helps make unit tests and local invocation easier too. ```json hl_lines="4-5" - { - "body": "hello", - "resource": "/hello", - "path": "/hello", - "httpMethod": "GET", - "isBase64Encoded": false, - "queryStringParameters": { - "foo": "bar" - }, - "multiValueQueryStringParameters": {}, - "pathParameters": { - "hello": "/hello" - }, - "stageVariables": {}, - "headers": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Encoding": "gzip, deflate, sdch", - "Accept-Language": "en-US,en;q=0.8", - "Cache-Control": "max-age=0", - "CloudFront-Forwarded-Proto": "https", - "CloudFront-Is-Desktop-Viewer": "true", - "CloudFront-Is-Mobile-Viewer": "false", - "CloudFront-Is-SmartTV-Viewer": "false", - "CloudFront-Is-Tablet-Viewer": "false", - "CloudFront-Viewer-Country": "US", - "Host": "1234567890.execute-api.us-east-1.amazonaws.com", - "Upgrade-Insecure-Requests": "1", - "User-Agent": "Custom User Agent String", - "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", - "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", - "X-Forwarded-For": "127.0.0.1, 127.0.0.2", - "X-Forwarded-Port": "443", - "X-Forwarded-Proto": "https" - }, - "multiValueHeaders": {}, - "requestContext": { - "accountId": "123456789012", - "resourceId": "123456", - "stage": "Prod", - "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", - "requestTime": "25/Jul/2020:12:34:56 +0000", - "requestTimeEpoch": 1428582896000, - "identity": { - "cognitoIdentityPoolId": null, - "accountId": null, - "cognitoIdentityId": null, - "caller": null, - "accessKey": null, - "sourceIp": "127.0.0.1", - "cognitoAuthenticationType": null, - "cognitoAuthenticationProvider": null, - "userArn": null, - "userAgent": "Custom User Agent String", - "user": null - }, - "path": "/Prod/hello", - "resourcePath": "/hello", - "httpMethod": "POST", - "apiId": "1234567890", - "protocol": "HTTP/1.1" - } - } + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.json" ``` -=== "response.json" +=== "getting_started_rest_api_resolver_output.json" ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": "{\"message\":\"hello universe\"}", - "isBase64Encoded": false - } + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json" ``` #### API Gateway HTTP API @@ -199,477 +75,166 @@ When using Amazon API Gateway HTTP API to front your Lambda functions, you can u ???+ note Using HTTP API v1 payload? 
Use `APIGatewayRestResolver` instead. `APIGatewayHttpResolver` defaults to v2 payload. -Here's an example on how we can handle the `/hello` path. - -```python hl_lines="3 7" title="Using HTTP API resolver" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayHttpResolver - -tracer = Tracer() -logger = Logger() -app = APIGatewayHttpResolver() - -@app.get("/hello") -@tracer.capture_method -def get_hello_universe(): - return {"message": "hello universe"} - -# You can continue to use other utilities just as before -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="5 11" title="Using HTTP API resolver" +--8<-- "examples/event_handler_rest/src/getting_started_http_api_resolver.py" ``` #### Application Load Balancer -When using Amazon Application Load Balancer to front your Lambda functions, you can use `ALBResolver`. - -```python hl_lines="3 7" title="Using ALB resolver" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import ALBResolver - -tracer = Tracer() -logger = Logger() -app = ALBResolver() +When using Amazon Application Load Balancer (ALB) to front your Lambda functions, you can use `ALBResolver`. -@app.get("/hello") -@tracer.capture_method -def get_hello_universe(): - return {"message": "hello universe"} - -# You can continue to use other utilities just as before -@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="5 11" title="Using ALB resolver" +--8<-- "examples/event_handler_rest/src/getting_started_alb_api_resolver.py" ``` ### Dynamic routes -You can use `/path/{dynamic_value}` when configuring dynamic URL paths. This allows you to define such dynamic value as part of your function signature. - -=== "app.py" +You can use `/todos/<todo_id>` to configure dynamic URL paths, where `<todo_id>` will be resolved at runtime. - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver +Each dynamic route you set must be part of your function signature. This allows us to call your function using keyword arguments when matching your dynamic route. - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() +???+ note + For brevity, we will only include the necessary keys for each sample request for the example to work. - @app.get("/hello/") - @tracer.capture_method - def get_hello_you(name): - return {"message": f"hello {name}"} +=== "dynamic_routes.py" - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14 16" + --8<-- "examples/event_handler_rest/src/dynamic_routes.py" ``` -=== "sample_request.json" +=== "dynamic_routes.json" ```json - { - "resource": "/hello/{name}", - "path": "/hello/lessa", - "httpMethod": "GET", - ...
- } - ``` - -#### Nested routes - -You can also nest paths as configured earlier in [our sample infrastructure](#required-resources): `/{message}/{name}`. - -=== "app.py" - - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("//") - @tracer.capture_method - def get_message(message, name): - return {"message": f"{message}, {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) ``` -=== "sample_request.json" - - ```json - { - "resource": "/{message}/{name}", - "path": "/hi/michael", - "httpMethod": "GET", - ... - } - ``` +???+ tip + You can also nest dynamic paths, for example `/todos/<todo_id>/<todo_status>`. #### Catch-all routes ???+ note We recommend having explicit routes whenever possible; use catch-all routes sparingly. -You can use a regex string to handle an arbitrary number of paths within a request, for example `.+`. +You can use a [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank"} string to handle an arbitrary number of paths within a request, for example `.+`. You can also combine nested paths with greedy regex to catch in between routes. ???+ warning - We will choose the more explicit registered route that match incoming event. + We choose the most explicit registered route that matches an incoming event. -=== "app.py" +=== "dynamic_routes_catch_all.py" - ```python hl_lines="5" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get(".+") - def catch_any_route_after_any(): - return {"path_received": app.current_event.path} - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="11" + --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.py" ``` -=== "sample_request.json" +=== "dynamic_routes_catch_all.json" ```json - { - "resource": "/any/route/should/work", - "path": "/any/route/should/work", - "httpMethod": "GET", - ... - } + --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.json" ``` ### HTTP Methods -You can use named decorators to specify the HTTP method that should be handled in your functions. As well as the -`get` method already shown above, you can use `post`, `put`, `patch`, `delete`, and `patch`. - -=== "app.py" - - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver +You can use named decorators to specify the HTTP method that should be handled in your functions. That is, `app.<http_method>`, where the HTTP method could be `get`, `post`, `put`, `patch`, `delete`, and `options`.
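Before the tabbed examples below, here is a minimal sketch combining a named method decorator with a dynamic route; route and field names are illustrative, the full examples live in the repository.

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver()

# Named decorator: only POST requests to /todos reach this function
@app.post("/todos")
def create_todo():
    # json_body deserializes a JSON string payload for you
    todo_title = app.current_event.json_body.get("title")
    return {"created": todo_title}

# Dynamic route: <todo_id> is passed to the function as a keyword argument
@app.get("/todos/<todo_id>")
def get_todo(todo_id: str):
    return {"todo_id": todo_id}

def lambda_handler(event, context):
    return app.resolve(event, context)
```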
- tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() +=== "http_methods.py" - # Only POST HTTP requests to the path /hello will route to this function - @app.post("/hello") - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14 17" + --8<-- "examples/event_handler_rest/src/http_methods.py" ``` -=== "sample_request.json" +=== "http_methods.json" ```json - { - "resource": "/hello/{name}", - "path": "/hello/lessa", - "httpMethod": "GET", - ... - } + --8<-- "examples/event_handler_rest/src/http_methods.json" ``` -If you need to accept multiple HTTP methods in a single function, you can use the `route` method and pass a list of -HTTP methods. - -=== "app.py" - - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() +If you need to accept multiple HTTP methods in a single function, you can use the `route` method and pass a list of HTTP methods. - # PUT and POST HTTP requests to the path /hello will route to this function - @app.route("/hello", method=["PUT", "POST"]) - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) - ``` - -=== "sample_request.json" - - ```json - { - "resource": "/hello/{name}", - "path": "/hello/lessa", - "httpMethod": "GET", - ... - } - ``` +```python hl_lines="15" title="Handling multiple HTTP Methods" +--8<-- "examples/event_handler_rest/src/http_methods_multiple.py" +``` ???+ note - It is usually better to have separate functions for each HTTP method, as the functionality tends to differ depending on which method is used. + It is generally better to have separate functions for each HTTP method, as the functionality tends to differ depending on which method is used. ### Accessing request details -By integrating with [Data classes utilities](../../utilities/data_classes.md){target="_blank"}, you have access to request details, Lambda context and also some convenient methods. +Event Handler integrates with [Event Source Data Classes utilities](../../utilities/data_classes.md){target="_blank"}, and it exposes their respective resolver request details and convenient methods under `app.current_event`. -These are made available in the response returned when instantiating `APIGatewayRestResolver`, for example `app.current_event` and `app.lambda_context`. +That is why you see `app.resolve(event, context)` in every example. This allows Event Handler to resolve requests, and expose data like `app.lambda_context` and `app.current_event`. #### Query strings and payload -Within `app.current_event` property, you can access query strings as dictionary via `query_string_parameters`, or by name via `get_query_string_value` method. 
- -You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property. - -```python hl_lines="7-9 11" title="Accessing query strings, JSON payload, and raw payload" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver +Within `app.current_event` property, you can access all available query strings as a dictionary via `query_string_parameters`, or a specific one via `get_query_string_value` method. -app = APIGatewayRestResolver() +You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property - like the earlier example in the [HTTP Methods](#http-methods) section. -@app.get("/hello") -def get_hello_you(): - query_strings_as_dict = app.current_event.query_string_parameters - json_payload = app.current_event.json_body - payload = app.current_event.body - - name = app.current_event.get_query_string_value(name="name", default_value="") - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="19 24" title="Accessing query strings and raw payload" +--8<-- "examples/event_handler_rest/src/accessing_request_details.py" ``` #### Headers Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`. -```python hl_lines="7-8" title="Accessing HTTP Headers" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver() - -@app.get("/hello") -def get_hello_you(): - headers_as_dict = app.current_event.headers - name = app.current_event.get_header_value(name="X-Name", default_value="") - - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="19" title="Accessing HTTP Headers" +--8<-- "examples/event_handler_rest/src/accessing_request_details_headers.py" ``` ### Handling not found routes By default, we return `404` for any unmatched route. -You can use **`not_found`** decorator to override this behaviour, and return a custom **`Response`**. - -```python hl_lines="11 13 16" title="Handling not found" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response -from aws_lambda_powertools.event_handler.exceptions import NotFoundError - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.not_found -@tracer.capture_method -def handle_not_found_errors(exc: NotFoundError) -> Response: - # Return 418 upon 404 errors - logger.info(f"Not found route: {app.current_event.path}") - return Response( - status_code=418, - content_type=content_types.TEXT_PLAIN, - body="I'm a teapot!" - ) - - -@app.get("/catch/me/if/you/can") -@tracer.capture_method -def catch_me_if_you_can(): - return {"message": "oh hey"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +You can use **`not_found`** decorator to override this behavior, and return a custom **`Response`**. 
+ +```python hl_lines="14 18" title="Handling not found" +--8<-- "examples/event_handler_rest/src/not_found_routes.py" ``` ### Exception handling You can use **`exception_handler`** decorator with any Python exception. This allows you to handle a common exception outside your route, for example validation errors. -```python hl_lines="10 15" title="Exception handling" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.exception_handler(ValueError) -def handle_value_error(ex: ValueError): - metadata = {"path": app.current_event.path} - logger.error(f"Malformed request: {ex}", extra=metadata) - - return Response( - status_code=400, - content_type=content_types.TEXT_PLAIN, - body="Invalid request", - ) - - -@app.get("/hello") -@tracer.capture_method -def hello_name(): - name = app.current_event.get_query_string_value(name="name") - if name is not None: - raise ValueError("name query string must be present") - return {"message": f"hello {name}"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="14 15" title="Exception handling" +--8<-- "examples/event_handler_rest/src/exception_handling.py" ``` ### Raising HTTP errors -You can easily raise any HTTP Error back to the client using `ServiceError` exception. +You can easily raise any HTTP Error back to the client using `ServiceError` exception. This ensures your Lambda function doesn't fail but returns the correct HTTP response signalling the error. ???+ info If you need to send custom headers, use [Response](#fine-grained-responses) class instead. -Additionally, we provide pre-defined errors for the most popular ones such as HTTP 400, 401, 404, 500. - -```python hl_lines="4-10 20 25 30 35 39" title="Raising common HTTP Status errors (4xx, 5xx)" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.event_handler.exceptions import ( - BadRequestError, - InternalServerError, - NotFoundError, - ServiceError, - UnauthorizedError, -) - -tracer = Tracer() -logger = Logger() - -app = APIGatewayRestResolver() - -@app.get(rule="/bad-request-error") -def bad_request_error(): - # HTTP 400 - raise BadRequestError("Missing required parameter") - -@app.get(rule="/unauthorized-error") -def unauthorized_error(): - # HTTP 401 - raise UnauthorizedError("Unauthorized") - -@app.get(rule="/not-found-error") -def not_found_error(): - # HTTP 404 - raise NotFoundError - -@app.get(rule="/internal-server-error") -def internal_server_error(): - # HTTP 500 - raise InternalServerError("Internal server error") - -@app.get(rule="/service-error", cors=True) -def service_error(): - raise ServiceError(502, "Something went wrong!") - # alternatively - # from http import HTTPStatus - # raise ServiceError(HTTPStatus.BAD_GATEWAY.value, "Something went wrong) - -def handler(event, context): - return app.resolve(event, context) +We provide pre-defined errors for the most popular ones such as HTTP 400, 401, 404, 500.
+ +```python hl_lines="6-11 23 28 33 38 43" title="Raising common HTTP Status errors (4xx, 5xx)" +--8<-- "examples/event_handler_rest/src/raising_http_errors.py" ``` ### Custom Domain API Mappings -When using Custom Domain API Mappings feature, you must use **`strip_prefixes`** param in the `APIGatewayRestResolver` constructor. - -Scenario: You have a custom domain `api.mydomain.dev` and set an API Mapping `payment` to forward requests to your Payments API, the path argument will be `/payment/`. +When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apigateway/latest/developerguide/rest-api-mappings.html){target="_blank"}, you must use **`strip_prefixes`** param in the `APIGatewayRestResolver` constructor. -This will lead to a HTTP 404 despite having your Lambda configured correctly. See the example below on how to account for this change. +**Scenario**: You have a custom domain `api.mydomain.dev`. Then you set `/payment` API Mapping to forward any payment requests to your Payments API. -=== "app.py" +**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/awslabs/aws-lambda-powertools-roadmap/issues/34). - ```python hl_lines="7" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver +To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver(strip_prefixes=["/payment"]) +=== "custom_api_mapping.py" - @app.get("/subscriptions/") - @tracer.capture_method - def get_subscription(subscription): - return {"subscription_id": subscription} - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="8" + --8<-- "examples/event_handler_rest/src/custom_api_mapping.py" ``` -=== "sample_request.json" +=== "custom_api_mapping.json" ```json - { - "resource": "/subscriptions/{subscription}", - "path": "/payment/subscriptions/123", - "httpMethod": "GET", - ... - } + --8<-- "examples/event_handler_rest/src/custom_api_mapping.json" ``` ???+ note @@ -685,67 +250,21 @@ You can configure CORS at the `APIGatewayRestResolver` constructor via `cors` pa This will ensure that CORS headers are always returned as part of the response when your functions match the path invoked. 
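As a rough sketch of the CORS configuration described above (origin, routes, and the opt-out route are illustrative):

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.event_handler.api_gateway import CORSConfig

cors_config = CORSConfig(allow_origin="https://example.com", max_age=300)
app = APIGatewayRestResolver(cors=cors_config)

@app.get("/todos")
def get_todos():
    # CORS headers are injected into this response automatically
    return {"todos": []}

@app.get("/healthcheck", cors=False)  # opt a single route out of CORS, if needed
def healthcheck():
    return {"status": "ok"}

def lambda_handler(event, context):
    return app.resolve(event, context)
```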
-=== "app.py" - - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, CORSConfig - - tracer = Tracer() - logger = Logger() - - cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) - app = APIGatewayRestResolver(cors=cors_config) - - @app.get("/hello/") - @tracer.capture_method - def get_hello_you(name): - return {"message": f"hello {name}"} - - @app.get("/hello", cors=False) # optionally exclude CORS from response, if needed - @tracer.capture_method - def get_hello_no_cors_needed(): - return {"message": "hello, no CORS needed for this path ;)"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) - ``` +???+ tip + Optionally disable CORS on a per path basis with `cors=False` parameter. -=== "response.json" +=== "setting_cors.py" - ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Access-Control-Allow-Origin": "https://www.example.com", - "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key" - }, - "body": "{\"message\":\"hello lessa\"}", - "isBase64Encoded": false - } + ```python hl_lines="5 11-12 34" + --8<-- "examples/event_handler_rest/src/setting_cors.py" ``` -=== "response_no_cors.json" +=== "setting_cors_output.json" ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": "{\"message\":\"hello lessa\"}", - "isBase64Encoded": false - } + --8<-- "examples/event_handler_rest/src/setting_cors_output.json" ``` -???+ tip - Optionally disable CORS on a per path basis with `cors=False` parameter. - #### Pre-flight Pre-flight (OPTIONS) calls are typically handled at the API Gateway level as per [our sample infrastructure](#required-resources), no Lambda integration necessary. However, ALB expects you to handle pre-flight requests. @@ -771,42 +290,17 @@ For convenience, these are the default values when using `CORSConfig` to enable You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-type. 
-=== "app.py" - - ```python hl_lines="11-16" - import json - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - - @app.get("/hello") - def get_hello_you(): - payload = json.dumps({"message": "I'm a teapot"}) - custom_headers = {"X-Custom": "X-Value"} +=== "fine_grained_responses.py" - return Response( - status_code=418, - content_type="application/json", - body=payload, - headers=custom_headers, - ) - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="7 24-28" + --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` -=== "response.json" +=== "fine_grained_responses_output.json" ```json - { - "body": "{\"message\":\"I\'m a teapot\"}", - "headers": { - "Content-Type": "application/json", - "X-Custom": "X-Value" - }, - "isBase64Encoded": false, - "statusCode": 418 - } + --8<-- "examples/event_handler_rest/src/fine_grained_responses_output.json" + ``` ### Compress @@ -815,46 +309,22 @@ You can compress with gzip and base64 encode your responses via `compress` param ???+ warning The client must send the `Accept-Encoding` header, otherwise a normal response will be sent. -=== "app.py" - - ```python hl_lines="5 7" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get("/hello", compress=True) - def get_hello_you(): - return {"message": "hello universe"} +=== "compressing_responses.py" - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14" + --8<-- "examples/event_handler_rest/src/compressing_responses.py" ``` -=== "sample_request.json" +=== "compressing_responses.json" ```json - { - "headers": { - "Accept-Encoding": "gzip" - }, - "httpMethod": "GET", - "path": "/hello", - ... - } + --8<-- "examples/event_handler_rest/src/compressing_responses.json" ``` -=== "response.json" +=== "compressing_responses_output.json" ```json - { - "body": "H4sIAAAAAAACE6tWyk0tLk5MT1WyUspIzcnJVyjNyyxLLSpOVaoFANha8kEcAAAA", - "headers": { - "Content-Encoding": "gzip", - "Content-Type": "application/json" - }, - "isBase64Encoded": true, - "statusCode": 200 - } + --8<-- "examples/event_handler_rest/src/compressing_responses_output.json" ``` ### Binary responses @@ -866,91 +336,28 @@ Like `compress` feature, the client must send the `Accept` header with the corre ???+ warning This feature requires API Gateway to configure binary media types, see [our sample infrastructure](#required-resources) for reference. -=== "app.py" - - ```python hl_lines="4 7 11" - import os - from pathlib import Path - - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() - - @app.get("/logo") - def get_logo(): - return Response(status_code=200, content_type="image/svg+xml", body=logo_file) +=== "binary_responses.py" - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14 20" + --8<-- "examples/event_handler_rest/src/binary_responses.py" ``` -=== "logo.svg" +=== "binary_responses_logo.svg" ```xml - - - - - - - - - - - - + --8<-- "examples/event_handler_rest/src/binary_responses_logo.svg" ``` -=== "sample_request.json" + +=== "binary_responses.json" ```json - { - "headers": { - "Accept": "image/svg+xml" - }, - "httpMethod": "GET", - "path": "/logo", - ... 
- } + --8<-- "examples/event_handler_rest/src/binary_responses.json" ``` -=== "response.json" +=== "binary_responses_output.json" ```json - { - "body": "H4sIAAAAAAACE3VXa2scRxD87ID/w+byKTCzN899yFZMLBLHYEMg4K9BHq0l4c2duDudZIf891TVrPwiMehmd+fR3dXV1eOnz+7/mpvjtNtfbzenK9+6VTNtyvbienN5uro9vLPD6tlPj797+r21zYtpM+3OD9vdSfPzxfbt1Lyc59v9QZ8aP7au9ab5482L5pf7m+3u0Pw+317al5um1cc31chJ07XONc9vr+eLxv3YNNby/P3x8ks3/Kq5vjhdvTr/MO3+xAu83OxPV1eHw83Jen13d9fexXa7u1wH59wam5clJ/fz9eb9fy304ziuNYulpyt3c79qPtTx8XePmuP1dPd8y4nGNdGlxg9h1ewPH+bpdDVtzt/Ok317Xt5f7ra3m4uTzXTXfLHyicyf7G/OC5bf7Kb9tDtOKwXGI5rDhxtMHKb7w7rs95x41O4P7u931/N88sOv+vfkn/rV66vd3c7TyXScNtuLiydlvr75+su3O5+uZYkmL3n805vzw1VT5vM9cIOpVQM8Xw9dm0yHn+JMbHvj+IoRiJuhHYtrBxPagPfBpLbDmmD6NuB7NpxzWttpDG3EKd46vAfr29HE2XZtxMYABx4VzIxY2VmvnaMN2jkW642zAdPZRkyms76DndGZPpthgEt9MvB0wEJM91gacUpsvc3c3eO4sYXJHuf52A42jNjEp2qXRzjrMzaENtngLGOwCS4krO7xzXscoIeR4WFLNpFbEo7GNrhdOhkEGElrgUyCx3gokQYAHMOLxjvFVY1XVDNQy0AKkx4PgPSIjcALv8QDf0He9NZ3BaEFhTdgInESMPKBMwAemzxTZT1zgFP5vRekOJTg8zucquEvCULsXOx1hjY5bWKuAh1fFkbuIGABa71+4cuRcMHfuiboMB6Kw8gGW5mQtDUwBa1f4s/Kd6+1iD8oplyIvq9oebEFYBOKsXi+ORNEJBKLbBhaXzIcZ0YGbgMF9IAkdG9I4Y/N65RhaYCLi+morPSipK8RMlmdIgahbFR+s2UF+Gpe3ieip6/kayCbkHpYRUp6QgH6MGFEgLuiFQHbviLO/DkdEGkbk4ljsawtR7J1zIAFk0aTioBBpIQYbmWNJArqKQlXxh9UoSQXjZxFIGoGFmzSPM/8FD+w8IDNmxG+l1pwlr5Ey/rwzP1gay1mG5Ykj6/GrpoIRZOMYqR3GiudHijAFJPJiePVCGBr2mIlE0bEUKpIMFrQwjCEcQabB4pOmJVyPolCYWEnYJZVyU+VE4JrQC56cPWtpfSVHfhkJD60RDy6foYyRNv1NZlCXoh/YwM05C7rEU0sitKERehqrLkiYCrhvcSO53VFrzxeAqB0UxHzbMFPb/q+1ltVRoITiTnNKRWm0ownRlbpFUu/iI5uYRMEoMb/kLt+yR3BSq98xtkQXElWl5h1yg6nvcz5SrVFta1UHTz3v4koIEzIVPgRKlkkc44ykipJsip7kVMWdICDFPBMMoOwUhlbRb23NX/UjqHYesi4sK2OmDhaWpLKiE1YzxbCsUhATZUlb2q7iBX7Kj/Kc80atEz66yWyXorhGTIkRqnrSURu8fWhdNIFKT7B8UnNJPIUwYLgLVHkOD7knC4rjNpFeturrBRRbmtHkpTh5VVIncmBnYlpjhT3HhMUd1urK0rQE7AE14goJdFRWBYZHyUIcLLm3AuhwF5qO7Zg4B+KTodiJCaSOMN4SXbRC+pR1Vs8FEZGOcnCtKvNvnC/aoiKj2+dekO1GdS4VMfAQo2++KXOonIgf5ifoo6hOkm6EFDP8pItNXvVpFNdxiNErThVXG1UQXHEz/eEYWk/jEmCRcyyaKtWKbVSr1YNc6rytcLnq6AORazytbMa9nqOutgYdUPmGL72nyKmlzxMVcjpPLPdE7cC1MlQQkpyZHasjPbRFVpJ+mNPqlcln6Tekk5lg7cd/9CbJMkkXFInSmrcw4PHQS1p0HZSANa6s8CqNiN/Qh7hI0vVfK7aj6u1Lnq67n173/P1vhd6Nf+ETgJLgSyjjYGpj2SVD3JM96PM+xRRZYcMtV8NJHKn3bW+pUydGMFg1CMelUSIgjwj4nGUVULDxxJJM1zvsM/q0uZ5TQggwFnoRanI9h76gcSJDPYLz5dA/y/EgXnygRcGostStqFXv0KdD7qP6MYUTKVXr1uhEzty8QP5plqDXbZuk1mtuUZGv3jtg8JIFKHTJrt6H9AduN4TAE6q95qzMEikMmkVRq+bKQXrC0cfUrdm7h5+8b8YjP8Cgadmu5INAAA=", - "headers": { - "Content-Type": "image/svg+xml" - }, - "isBase64Encoded": true, - "statusCode": 200 - } + --8<-- "examples/event_handler_rest/src/binary_responses_output.json" ``` ### Debug mode @@ -964,326 +371,88 @@ This will enable full tracebacks errors in the response, print request and respo It's best to use for local development only! -```python hl_lines="3" title="Enabling debug mode" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver(debug=True) - -@app.get("/hello") -def get_hello_universe(): - return {"message": "hello universe"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="11" title="Enabling debug mode" +--8<-- "examples/event_handler_rest/src/debug_mode.py" ``` ### Custom serializer You can instruct API Gateway handler to use a custom serializer to best suit your needs, for example take into account Enums when serializing. 
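A minimal sketch of such a serializer; the encoder below handles Enums and sets, and the details are illustrative:

```python
import json
from enum import Enum
from json import JSONEncoder

from aws_lambda_powertools.event_handler import APIGatewayRestResolver

class CustomEncoder(JSONEncoder):
    """Encode Enums and sets, which the default JSON encoder rejects"""

    def default(self, obj):
        if isinstance(obj, Enum):
            return obj.value
        if isinstance(obj, set):
            return sorted(obj)
        return JSONEncoder.default(self, obj)

def custom_serializer(obj) -> str:
    # function the resolver calls to serialize every response body
    return json.dumps(obj, cls=CustomEncoder)

app = APIGatewayRestResolver(serializer=custom_serializer)
```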
-```python hl_lines="21-22 26" title="Using a custom JSON serializer for responses" -import json -from enum import Enum -from json import JSONEncoder -from typing import Dict - -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -class CustomEncoder(JSONEncoder): - """Your customer json encoder""" - def default(self, obj): - if isinstance(obj, Enum): - return obj.value - try: - iterable = iter(obj) - except TypeError: - pass - else: - return sorted(iterable) - return JSONEncoder.default(self, obj) - -def custom_serializer(obj) -> str: - """Your custom serializer function APIGatewayRestResolver will use""" - return json.dumps(obj, cls=CustomEncoder) - -# Assigning your custom serializer -app = APIGatewayRestResolver(serializer=custom_serializer) - -class Color(Enum): - RED = 1 - BLUE = 2 - -@app.get("/colors") -def get_color() -> Dict: - return { - # Color.RED will be serialized to 1 as expected now - "color": Color.RED, - "variations": {"light", "dark"}, - } +```python hl_lines="35 40" title="Using a custom JSON serializer for responses" +--8<-- "examples/event_handler_rest/src/custom_serializer.py" ``` ### Split routes with Router As you grow the number of routes a given Lambda function should handle, it is natural to split routes into separate files to ease maintenance - That's where the `Router` feature is useful. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `users.py`, this is how you'd use the `Router` feature. +Let's assume you have `split_route.py` as your Lambda function entrypoint and routes in `split_route_module.py`. This is how you'd use the `Router` feature. -=== "users.py" +=== "split_route_module.py" We import **Router** instead of **APIGatewayRestResolver**; syntax wise is exactly the same. - ```python hl_lines="5 8 12 15 21" - import itertools - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details_here", "user2": "details_here", "user3": "details_here"} - - - @router.get("/users") - def get_users() -> Dict: - # /users?limit=1 - pagination_limit = router.current_event.get_query_string_value(name="limit", default_value=10) - - logger.info(f"Fetching the first {pagination_limit} users...") - ret = dict(itertools.islice(USERS.items(), int(pagination_limit))) - return {"items": [ret]} - - @router.get("/users/") - def get_user(username: str) -> Dict: - logger.info(f"Fetching username {username}") - return {"details": USERS.get(username, {})} - - # many other related /users routing + ```python hl_lines="5 13 16 25 28" + --8<-- "examples/event_handler_rest/src/split_route_module.py" ``` -=== "app.py" +=== "split_route.py" We use `include_router` method and include all user routers registered in the `router` global object. - ```python hl_lines="7 10-11" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext - - import users - - logger = Logger() - app = APIGatewayRestResolver() - app.include_router(users.router) - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + ```python hl_lines="11" + --8<-- "examples/event_handler_rest/src/split_route.py" ``` #### Route prefix -In the previous example, `users.py` routes had a `/users` prefix. 
This might grow over time and become repetitive. - -When necessary, you can set a prefix when including a router object. This means you could remove `/users` prefix in `users.py` altogether. - -=== "app.py" +In the previous example, `split_route_module.py` routes had a `/todos` prefix. This might grow over time and become repetitive. - ```python hl_lines="9" - from typing import Dict +When necessary, you can set a prefix when including a router object. This means you could remove the `/todos` prefix altogether. - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext +=== "split_route_prefix.py" - import users - - app = APIGatewayRestResolver() - app.include_router(users.router, prefix="/users") # prefix '/users' to any route in `users.router` - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + ```python hl_lines="12" + --8<-- "examples/event_handler_rest/src/split_route_prefix.py" ``` -=== "users.py" - - ```python hl_lines="11 15" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details", "user2": "details", "user3": "details"} - - - @router.get("/") # /users, when we set the prefix in app.py - def get_users() -> Dict: - ... - - @router.get("/") - def get_user(username: str) -> Dict: - ... +=== "split_route_prefix_module.py" - # many other related /users routing + ```python hl_lines="13 25" + --8<-- "examples/event_handler_rest/src/split_route_prefix_module.py" ``` #### Sample layout -This sample project contains a Users function with two distinct set of routes, `/users` and `/health`. The layout optimizes for code sharing, no custom build tooling, and it uses [Lambda Layers](../../index.md#lambda-layer) to install Lambda Powertools. -=== "Project layout" - - ```python hl_lines="1 8 10 12-15" - . - ├── Pipfile # project app & dev dependencies; poetry, pipenv, etc. - ├── Pipfile.lock - ├── README.md - ├── src - │ ├── __init__.py - │ ├── requirements.txt # sam build detect it automatically due to CodeUri: src, e.g. pipenv lock -r > src/requirements.txt - │ └── users - │ ├── __init__.py - │ ├── main.py # this will be our users Lambda fn; it could be split in folders if we want separate fns same code base - │ └── routers # routers module - │ ├── __init__.py - │ ├── health.py # /users routes, e.g. from routers import users; users.router - │ └── users.py # /users routes, e.g. from .routers import users; users.router - ├── template.yml # SAM template.yml, CodeUri: src, Handler: users.main.lambda_handler - └── tests +This is a sample project layout for a monolithic function with routes split in different files (`/todos`, `/health`). + +```shell hl_lines="4 7 10 12-13" title="Sample project layout" +. +├── pyproject.toml # project app & dev dependencies; poetry, pipenv, etc. +├── poetry.lock +├── src +│ ├── __init__.py +│ ├── requirements.txt # sam build detect it automatically due to CodeUri: src. poetry export --format src/requirements.txt +│ └── todos +│ ├── __init__.py +│ ├── main.py # this will be our todos Lambda fn; it could be split in folders if we want separate fns same code base +│ └── routers # routers module +│ ├── __init__.py +│ ├── health.py # /health routes. from routers import health; health.router +│ └── todos.py # /todos routes.
from .routers import todos; todos.router +├── template.yml # SAM. CodeUri: src, Handler: todos.main.lambda_handler +└── tests + ├── __init__.py + ├── unit + │ ├── __init__.py + │ └── test_todos.py # unit tests for the todos router + │ └── test_health.py # unit tests for the health router + └── functional ├── __init__.py - ├── unit - │ ├── __init__.py - │ └── test_users.py # unit tests for the users router - │ └── test_health.py # unit tests for the health router - └── functional - ├── __init__.py - ├── conftest.py # pytest fixtures for the functional tests - └── test_main.py # functional tests for the main lambda handler - ``` - -=== "template.yml" - - ```yaml hl_lines="22-23" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Example service with multiple routes - Globals: - Function: - Timeout: 10 - MemorySize: 512 - Runtime: python3.9 - Tracing: Active - Architectures: - - x86_64 - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: users - Resources: - UsersService: - Type: AWS::Serverless::Function - Properties: - Handler: users.main.lambda_handler - CodeUri: src - Layers: - # Latest version: https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:4 - Events: - ByUser: - Type: Api - Properties: - Path: /users/{name} - Method: GET - AllUsers: - Type: Api - Properties: - Path: /users - Method: GET - HealthCheck: - Type: Api - Properties: - Path: /status - Method: GET - Outputs: - UsersApiEndpoint: - Description: "API Gateway endpoint URL for Prod environment for Users Function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod" - AllUsersURL: - Description: "URL to fetch all registered users" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users" - ByUserURL: - Description: "URL to retrieve details by user" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users/test" - UsersServiceFunctionArn: - Description: "Users Lambda Function ARN" - Value: !GetAtt UsersService.Arn - ``` - -=== "src/users/main.py" - - ```python hl_lines="8 14-15" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.logging.correlation_paths import APPLICATION_LOAD_BALANCER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from .routers import health, users - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - app.include_router(health.router) - app.include_router(users.router) - - - @logger.inject_lambda_context(correlation_id_path=API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) - ``` - -=== "src/users/routers/health.py" - - ```python hl_lines="4 6-7 10" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - router = Router() - logger = Logger(child=True) - - - @router.get("/status") - def health() -> Dict: - logger.debug("Health check called") - return {"status": "OK"} - ``` - -=== "tests/functional/test_users.py" - - ```python hl_lines="3" - import json - - from src.users import main 
# follows namespace package from root
-
-
-    def test_lambda_handler(apigw_event, lambda_context):
-        ret = main.lambda_handler(apigw_event, lambda_context)
-        expected = json.dumps({"message": "hello universe"}, separators=(",", ":"))
-
-        assert ret["statusCode"] == 200
-        assert ret["body"] == expected
-    ```
+    ├── conftest.py            # pytest fixtures for the functional tests
+    └── test_main.py           # functional tests for the main lambda handler
+```

### Considerations

@@ -1340,55 +509,16 @@ your development, building, deployment tooling need to accommodate the distinct

You can test your routes by passing a proxy event request with `path` and `httpMethod` set.

-=== "test_app.py"
+=== "assert_http_response.py"

-    ```python hl_lines="18-24"
-    from dataclasses import dataclass
-
-    import pytest
-    import app
-
-    @pytest.fixture
-    def lambda_context():
-        @dataclass
-        class LambdaContext:
-            function_name: str = "test"
-            memory_limit_in_mb: int = 128
-            invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test"
-            aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72"
-
-        return LambdaContext()
-
-    def test_lambda_handler(lambda_context):
-        minimal_event = {
-            "path": "/hello",
-            "httpMethod": "GET",
-            "requestContext": {  # correlation ID
-                "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef"
-            }
-        }
-
-        app.lambda_handler(minimal_event, lambda_context)
+    ```python hl_lines="21-24"
+    --8<-- "examples/event_handler_rest/src/assert_http_response.py"
    ```

-=== "app.py"
+=== "assert_http_response_module.py"

    ```python
-    from aws_lambda_powertools import Logger
-    from aws_lambda_powertools.logging import correlation_paths
-    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
-
-    logger = Logger()
-    app = APIGatewayRestResolver()  # API Gateway REST API (v1)
-
-    @app.get("/hello")
-    def get_hello_universe():
-        return {"message": "hello universe"}
-
-    # You can continue to use other utilities just as before
-    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
-    def lambda_handler(event, context):
-        return app.resolve(event, context)
+    --8<-- "examples/event_handler_rest/src/assert_http_response_module.py"
    ```

## FAQ

diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md
index f3203e37834..4d28b41a81f 100644
--- a/docs/core/event_handler/appsync.md
+++ b/docs/core/event_handler/appsync.md
@@ -5,7 +5,7 @@ description: Core utility

Event handler for AWS AppSync Direct Lambda Resolver and Amplify GraphQL Transformer.

-### Key Features
+## Key Features

* Automatically parse API arguments to function arguments
* Choose between strictly matching a GraphQL field name or matching all of them to a function
@@ -30,144 +30,16 @@ This is the sample infrastructure we are using for the initial examples with a A

???+ tip "Tip: Designing GraphQL Schemas for the first time?"
    Visit [AWS AppSync schema documentation](https://docs.aws.amazon.com/appsync/latest/devguide/designing-your-schema.html){target="_blank"} to understand how to define types, nesting, and pagination.
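Before the schema and template below, it may help to see the shape of the resolver code they are wired to. This is a minimal sketch based on the inline example this change set replaces; the `getting_started_graphql_api_resolver.py` include referenced later is assumed to follow the same pattern:

```python
from aws_lambda_powertools import Logger, Tracer
from aws_lambda_powertools.event_handler import AppSyncResolver
from aws_lambda_powertools.logging import correlation_paths

tracer = Tracer()
logger = Logger()
app = AppSyncResolver()

# In-memory store for illustration only; a real resolver would query a data source
TODOS = [{"id": "7e362732-c8cd-4405-b090-144ac9b38960", "title": "First task", "done": False}]


@app.resolver(type_name="Query", field_name="getTodo")
def get_todo(id: str = ""):
    # GraphQL arguments are passed as keyword arguments
    logger.info(f"Fetching Todo {id}")
    return next((todo for todo in TODOS if todo["id"] == id), None)


@app.resolver(type_name="Query", field_name="listTodos")
def list_todos():
    return TODOS


@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
@tracer.capture_lambda_handler
def lambda_handler(event, context):
    return app.resolve(event, context)
```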
-=== "schema.graphql" +=== "getting_started_schema.graphql" ```typescript - --8<-- "docs/shared/getting_started_schema.graphql" + --8<-- "examples/event_handler_graphql/src/getting_started_schema.graphql" ``` === "template.yml" - ```yaml hl_lines="37-42 50-55 61-62 78-91 96-120" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Hello world Direct Lambda Resolver - - Globals: - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_SERVICE_NAME: sample_resolver - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Description: Sample Lambda Powertools Direct Lambda Resolver - Tags: - SOLUTION: LambdaPowertoolsPython - - # IAM Permissions and Roles - - AppSyncServiceRole: - Type: "AWS::IAM::Role" - Properties: - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Principal: - Service: - - "appsync.amazonaws.com" - Action: - - "sts:AssumeRole" - - InvokeLambdaResolverPolicy: - Type: "AWS::IAM::Policy" - Properties: - PolicyName: "DirectAppSyncLambda" - PolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Action: "lambda:invokeFunction" - Resource: - - !GetAtt HelloWorldFunction.Arn - Roles: - - !Ref AppSyncServiceRole - - # GraphQL API - - HelloWorldApi: - Type: "AWS::AppSync::GraphQLApi" - Properties: - Name: HelloWorldApi - AuthenticationType: "API_KEY" - XrayEnabled: true - - HelloWorldApiKey: - Type: AWS::AppSync::ApiKey - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - - HelloWorldApiSchema: - Type: "AWS::AppSync::GraphQLSchema" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Definition: | - schema { - query:Query - } - - type Query { - getTodo(id: ID!): Todo - listTodos: [Todo] - } - - type Todo { - id: ID! - title: String - description: String - done: Boolean - } - - # Lambda Direct Data Source and Resolver - - HelloWorldFunctionDataSource: - Type: "AWS::AppSync::DataSource" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Name: "HelloWorldLambdaDirectResolver" - Type: "AWS_LAMBDA" - ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn - LambdaConfig: - LambdaFunctionArn: !GetAtt HelloWorldFunction.Arn - - ListTodosResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "listTodos" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - GetTodoResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "getTodo" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - - Outputs: - HelloWorldFunction: - Description: "Hello World Lambda Function ARN" - Value: !GetAtt HelloWorldFunction.Arn - - HelloWorldAPI: - Value: !GetAtt HelloWorldApi.Arn + ```yaml hl_lines="59-60 71-72 94-95 104-105 112-113" + --8<-- "examples/event_handler_graphql/sam/template.yaml" ``` ### Resolver decorator @@ -176,248 +48,86 @@ You can define your functions to match GraphQL types and fields with the `app.re Here's an example where we have two separate functions to resolve `getTodo` and `listTodos` fields within the `Query` type. 
For completion, we use Scalar type utilities to generate the right output based on our schema definition. -???+ info - GraphQL arguments are passed as function arguments. +???+ important + GraphQL arguments are passed as function keyword arguments. -=== "app.py" + **Example** - ```python hl_lines="3-5 9 31-32 39-40 47" - from aws_lambda_powertools import Logger, Tracer + The GraphQL Query `getTodo(id: "todo_id_value")` will + call `get_todo` as `get_todo(id="todo_id_value")`. - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils +=== "getting_started_graphql_api_resolver.py" - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() + ```python hl_lines="7 13 23 25-26 35 37 48" + --8<-- "examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py" + ``` - # Note that `creation_time` isn't available in the schema - # This utility also takes into account what info you make available at API level vs what's stored - TODOS = [ - { - "id": scalar_types_utils.make_id(), # type ID or String - "title": "First task", - "description": "String", - "done": False, - "creation_time": scalar_types_utils.aws_datetime(), # type AWSDateTime - }, - { - "id": scalar_types_utils.make_id(), - "title": "Second task", - "description": "String", - "done": True, - "creation_time": scalar_types_utils.aws_datetime(), - }, - ] +=== "getting_started_schema.graphql" + ```typescript hl_lines="6-7" + --8<-- "examples/event_handler_graphql/src/getting_started_schema.graphql" + ``` - @app.resolver(type_name="Query", field_name="getTodo") - def get_todo(id: str = ""): - logger.info(f"Fetching Todo {id}") - todo = [todo for todo in TODOS if todo["id"] == id] +=== "getting_started_get_todo.json" - return todo + ```json hl_lines="2-3" + --8<-- "examples/event_handler_graphql/src/getting_started_get_todo.json" + ``` +=== "getting_started_list_todos.json" - @app.resolver(type_name="Query", field_name="listTodos") - def list_todos(): - return TODOS + ```json hl_lines="2 40 42" + --8<-- "examples/event_handler_graphql/src/getting_started_list_todos.json" + ``` +### Scalar functions - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) - ``` +When working with [AWS AppSync Scalar types](https://docs.aws.amazon.com/appsync/latest/devguide/scalars.html){target="_blank"}, you might want to generate the same values for data validation purposes. -=== "schema.graphql" +For convenience, the most commonly used values are available as functions within `scalar_types_utils` module. 
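The contents of the `scalar_functions.py` example referenced below are not visible in this diff. As a sketch of the kind of calls it is assumed to make, each helper can be used directly to produce a schema-compliant value:

```python
from aws_lambda_powertools.utilities.data_classes.appsync.scalar_types_utils import (
    aws_date,
    aws_datetime,
    aws_time,
    aws_timestamp,
    make_id,
)

# ID scalar: a random UUID string, e.g. "e916c84d-48b6-484c-bef3-cee3e4d86ebf"
todo_id: str = make_id()
# AWSDate scalar: today's date in ISO 8601 format
date: str = aws_date()
# AWSTime scalar: current time in ISO 8601 format
time_now: str = aws_time()
# AWSDateTime scalar: current date and time in ISO 8601 format
now: str = aws_datetime()
# AWSTimestamp scalar: seconds since the Unix epoch, as an integer
timestamp: int = aws_timestamp()
```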
- ```typescript - --8<-- "docs/shared/getting_started_schema.graphql" - ``` +```python hl_lines="1-6" title="Creating key scalar values with scalar_types_utils" +--8<-- "examples/event_handler_graphql/src/scalar_functions.py" +``` -=== "getTodo_event.json" - - ```json - { - "arguments": { - "id": "7e362732-c8cd-4405-b090-144ac9b38960" - }, - "identity": null, - "source": null, - "request": { - "headers": { - "x-forwarded-for": "1.2.3.4, 5.6.7.8", - "accept-encoding": "gzip, deflate, br", - "cloudfront-viewer-country": "NL", - "cloudfront-is-tablet-viewer": "false", - "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", - "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", - "cloudfront-forwarded-proto": "https", - "origin": "https://eu-west-1.console.aws.amazon.com", - "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", - "content-type": "application/json", - "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", - "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", - "content-length": "114", - "x-amz-user-agent": "AWS-Console-AppSync/", - "x-forwarded-proto": "https", - "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", - "accept-language": "en-US,en;q=0.5", - "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", - "cloudfront-is-desktop-viewer": "true", - "cloudfront-is-mobile-viewer": "false", - "accept": "*/*", - "x-forwarded-port": "443", - "cloudfront-is-smarttv-viewer": "false" - } - }, - "prev": null, - "info": { - "parentTypeName": "Query", - "selectionSetList": [ - "title", - "id" - ], - "selectionSetGraphQL": "{\n title\n id\n}", - "fieldName": "getTodo", - "variables": {} - }, - "stash": {} - } - ``` +Here's a table with their related scalar as a quick reference: -=== "listTodos_event.json" - - ```json - { - "arguments": {}, - "identity": null, - "source": null, - "request": { - "headers": { - "x-forwarded-for": "1.2.3.4, 5.6.7.8", - "accept-encoding": "gzip, deflate, br", - "cloudfront-viewer-country": "NL", - "cloudfront-is-tablet-viewer": "false", - "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", - "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", - "cloudfront-forwarded-proto": "https", - "origin": "https://eu-west-1.console.aws.amazon.com", - "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", - "content-type": "application/json", - "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", - "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", - "content-length": "114", - "x-amz-user-agent": "AWS-Console-AppSync/", - "x-forwarded-proto": "https", - "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", - "accept-language": "en-US,en;q=0.5", - "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", - "cloudfront-is-desktop-viewer": "true", - "cloudfront-is-mobile-viewer": "false", - "accept": "*/*", - "x-forwarded-port": "443", - "cloudfront-is-smarttv-viewer": "false" - } - }, - "prev": null, - "info": { - "parentTypeName": "Query", - "selectionSetList": [ - "id", - "title" - ], - "selectionSetGraphQL": "{\n id\n title\n}", - "fieldName": "listTodos", - "variables": {} - }, - "stash": {} - } - ``` +| Scalar type | Scalar function | Sample value | +| ---------------- | ---------------------------------- | -------------------------------------- | +| **ID** | 
`scalar_types_utils.make_id` | `e916c84d-48b6-484c-bef3-cee3e4d86ebf` | +| **AWSDate** | `scalar_types_utils.aws_date` | `2022-07-08Z` | +| **AWSTime** | `scalar_types_utils.aws_time` | `15:11:00.189Z` | +| **AWSDateTime** | `scalar_types_utils.aws_datetime` | `2022-07-08T15:11:00.189Z` | +| **AWSTimestamp** | `scalar_types_utils.aws_timestamp` | `1657293060` | ## Advanced ### Nested mappings -You can nest `app.resolver()` decorator multiple times when resolving fields with the same return. - -=== "nested_mappings.py" - - ```python hl_lines="4 8 10-12 18" - from aws_lambda_powertools import Logger, Tracer +???+ note - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver + The following examples use a more advanced schema. These schemas differ from [initial sample infrastructure we used earlier](#required-resources). - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() +You can nest `app.resolver()` decorator multiple times when resolving fields with the same return value. - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - return name + description +=== "nested_mappings.py" - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="4 10 20-21 23 30" + --8<-- "examples/event_handler_graphql/src/nested_mappings.py" ``` -=== "schema.graphql" +=== "nested_mappings_schema.graphql" ```typescript hl_lines="6 20" - schema { - query: Query - } - - type Query { - listLocations: [Location] - } - - type Location { - id: ID! - name: String! - description: String - address: String - } - - type Merchant { - id: String! - name: String! - description: String - locations: [Location] - } + --8<-- "examples/event_handler_graphql/src/nested_mappings_schema.graphql" ``` ### Async functions For Lambda Python3.8+ runtime, this utility supports async functions when you use in conjunction with `asyncio.run`. -```python hl_lines="5 9 11-13 21" title="Resolving GraphQL resolvers async" -import asyncio -from aws_lambda_powertools import Logger, Tracer - -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import AppSyncResolver - -tracer = Tracer(service="sample_resolver") -logger = Logger(service="sample_resolver") -app = AppSyncResolver() - -@app.resolver(type_name="Query", field_name="listTodos") -async def list_todos(): - todos = await some_async_io_call() - return todos - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - result = app.resolve(event, context) - - return asyncio.run(result) +```python hl_lines="7 14 24-25 34 36" title="Resolving GraphQL resolvers async" +--8<-- "examples/event_handler_graphql/src/async_resolvers.py" ``` ### Amplify GraphQL Transformer @@ -427,29 +137,7 @@ Assuming you have [Amplify CLI installed](https://docs.amplify.aws/cli/start/ins ```typescript hl_lines="7 15 20 22" title="Example GraphQL Schema" -@model -type Merchant { - id: String! - name: String! - description: String - # Resolves to `common_field` - commonField: String @function(name: "merchantInfo-${env}") -} - -type Location { - id: ID! - name: String! 
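The two functions created in this step back the `@function` directives in the schema above. As a rough sketch of the shape `searchMerchant` takes — based on the inline example this change replaces, not necessarily the final `graphql_transformer_search_merchant.py` include:

```python
from aws_lambda_powertools.event_handler import AppSyncResolver
from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils

app = AppSyncResolver()


@app.resolver(type_name="Query", field_name="findMerchant")
def find_merchant(search: str):
    # A real implementation would query a data store using `search`
    return [
        {
            "id": scalar_types_utils.make_id(),
            "name": "Brewer Brewing",
            "description": "Mike Brewer's IPA brewing place",
        },
        {
            "id": scalar_types_utils.make_id(),
            "name": "Serverlessa's Bakery",
            "description": "Lessa's sourdough place",
        },
    ]


def lambda_handler(event, context):
    return app.resolve(event, context)
```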
- address: String - # Resolves to `common_field` - commonField: String @function(name: "merchantInfo-${env}") -} - -type Query { - # List of locations resolves to `list_locations` - listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}") - # List of locations resolves to `list_locations` - findMerchant(search: str): [Merchant] @function(name: "searchMerchant-${env}") -} +--8<-- "examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql" ``` [Create two new basic Python functions](https://docs.amplify.aws/cli/function#set-up-a-function){target="_blank"} via `amplify add function`. @@ -457,257 +145,60 @@ type Query { ???+ note Amplify CLI generated functions use `Pipenv` as a dependency manager. Your function source code is located at **`amplify/backend/function/your-function-name`**. -Within your function's folder, add Lambda Powertools as a dependency with `pipenv install aws-lambda-powertools`. +Within your function's folder, add Powertools as a dependency with `pipenv install aws-lambda-powertools`. Use the following code for `merchantInfo` and `searchMerchant` functions respectively. -=== "merchantInfo/src/app.py" - - ```python hl_lines="4-5 9 11-12 15-16 23" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - tracer = Tracer(service="sample_graphql_transformer_resolver") - logger = Logger(service="sample_graphql_transformer_resolver") - app = AppSyncResolver() +=== "graphql_transformer_merchant_info.py" - @app.resolver(type_name="Query", field_name="listLocations") - def list_locations(page: int = 0, size: int = 10): - return [{"id": 100, "name": "Smooth Grooves"}] + ```python hl_lines="4 6 22-23 27-28 36" + --8<-- "examples/event_handler_graphql/src/graphql_transformer_merchant_info.py" + ``` - @app.resolver(field_name="commonField") - def common_field(): - # Would match all fieldNames matching 'commonField' - return scalar_types_utils.make_id() +=== "graphql_transformer_search_merchant.py" - @tracer.capture_lambda_handler - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - def lambda_handler(event, context): - app.resolve(event, context) - ``` -=== "searchMerchant/src/app.py" - - ```python hl_lines="1 4 6-7" - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - app = AppSyncResolver() - - @app.resolver(type_name="Query", field_name="findMerchant") - def find_merchant(search: str): - return [ - { - "id": scalar_types_utils.make_id(), - "name": "Brewer Brewing", - "description": "Mike Brewer's IPA brewing place" - }, - { - "id": scalar_types_utils.make_id(), - "name": "Serverlessa's Bakery", - "description": "Lessa's sourdough place" - }, - ] + ```python hl_lines="4 6 21-22 36 42" + --8<-- "examples/event_handler_graphql/src/graphql_transformer_search_merchant.py" ``` -**Example AppSync GraphQL Transformer Function resolver events** - -=== "Query.listLocations event" +=== "graphql_transformer_list_locations.json" ```json hl_lines="2-7" - { - "typeName": "Query", - "fieldName": "listLocations", - "arguments": { - "page": 2, - "size": 1 - }, - "identity": { - "claims": { - "iat": 1615366261 - ... - }, - "username": "mike", - ... 
-      },
-      "request": {
-        "headers": {
-          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
-          "x-forwarded-for": "127.0.0.1"
-          ...
-        }
-      },
-      ...
-    }
+    --8<-- "examples/event_handler_graphql/src/graphql_transformer_list_locations.json"
    ```

-=== "*.commonField event"
+=== "graphql_transformer_common_field.json"

    ```json hl_lines="2 3"
-    {
-      "typeName": "Merchant",
-      "fieldName": "commonField",
-      "arguments": {
-      },
-      "identity": {
-        "claims": {
-          "iat": 1615366261
-          ...
-        },
-        "username": "mike",
-        ...
-      },
-      "request": {
-        "headers": {
-          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
-          "x-forwarded-for": "127.0.0.1"
-          ...
-        }
-      },
-      ...
-    }
+    --8<-- "examples/event_handler_graphql/src/graphql_transformer_common_field.json"
    ```

-=== "Query.findMerchant event"
+=== "graphql_transformer_find_merchant.json"

    ```json hl_lines="2-6"
-    {
-      "typeName": "Query",
-      "fieldName": "findMerchant",
-      "arguments": {
-        "search": "Brewers Coffee"
-      },
-      "identity": {
-        "claims": {
-          "iat": 1615366261
-          ...
-        },
-        "username": "mike",
-        ...
-      },
-      "request": {
-        "headers": {
-          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
-          "x-forwarded-for": "127.0.0.1"
-          ...
-        }
-      },
-      ...
-    }
+    --8<-- "examples/event_handler_graphql/src/graphql_transformer_find_merchant.json"
    ```

### Custom data models

-You can subclass `AppSyncResolverEvent` to bring your own set of methods to handle incoming events, by using `data_model` param in the `resolve` method.
-
-=== "custom_model.py"
-
-    ```python hl_lines="12-15 20 27"
-    from aws_lambda_powertools import Logger, Tracer
-
-    from aws_lambda_powertools.logging import correlation_paths
-    from aws_lambda_powertools.event_handler import AppSyncResolver
-    from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent
-
-    tracer = Tracer(service="sample_resolver")
-    logger = Logger(service="sample_resolver")
-    app = AppSyncResolver()
+You can subclass [AppSyncResolverEvent](../../utilities/data_classes.md#appsync-resolver){target="_blank"} to bring your own set of methods to handle incoming events, by using `data_model` param in the `resolve` method.

+=== "custom_models.py"

-    class MyCustomModel(AppSyncResolverEvent):
-        @property
-        def country_viewer(self) -> str:
-            return self.request_headers.get("cloudfront-viewer-country")
-
-    @app.resolver(field_name="listLocations")
-    @app.resolver(field_name="locations")
-    def get_locations(name: str, description: str = ""):
-        if app.current_event.country_viewer == "US":
-            ...
-        return name + description
-
-    @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
-    @tracer.capture_lambda_handler
-    def lambda_handler(event, context):
-        return app.resolve(event, context, data_model=MyCustomModel)
+    ```python hl_lines="4 7 23-25 28-29 36 43"
+    --8<-- "examples/event_handler_graphql/src/custom_models.py"
    ```

-=== "schema.graphql"
+=== "nested_mappings_schema.graphql"

    ```typescript hl_lines="6 20"
-    schema {
-        query: Query
-    }
-
-    type Query {
-        listLocations: [Location]
-    }
-
-    type Location {
-        id: ID!
-        name: String!
-        description: String
-        locations: [Location]
-    }

-=== "listLocations_event.json"
-
-    ```json
-    {
-      "arguments": {},
-      "identity": null,
-      "source": null,
-      "request": {
-        "headers": {
-          "x-forwarded-for": "1.2.3.4, 5.6.7.8",
-          "accept-encoding": "gzip, deflate, br",
-          "cloudfront-viewer-country": "NL",
-          "cloudfront-is-tablet-viewer": "false",
-          "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1",
-          "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)",
-          "cloudfront-forwarded-proto": "https",
-          "origin": "https://eu-west-1.console.aws.amazon.com",
-          "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq",
-          "content-type": "application/json",
-          "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494",
-          "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==",
-          "content-length": "114",
-          "x-amz-user-agent": "AWS-Console-AppSync/",
-          "x-forwarded-proto": "https",
-          "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com",
-          "accept-language": "en-US,en;q=0.5",
-          "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0",
-          "cloudfront-is-desktop-viewer": "true",
-          "cloudfront-is-mobile-viewer": "false",
-          "accept": "*/*",
-          "x-forwarded-port": "443",
-          "cloudfront-is-smarttv-viewer": "false"
-        }
-      },
-      "prev": null,
-      "info": {
-        "parentTypeName": "Query",
-        "selectionSetList": [
-          "id",
-          "name",
-          "description"
-        ],
-        "selectionSetGraphQL": "{\n  id\n  name\n  description\n}",
-        "fieldName": "listLocations",
-        "variables": {}
-      },
-      "stash": {}
-    }
+=== "graphql_transformer_list_locations.json"

+    ```json hl_lines="18-19"
+    --8<-- "examples/event_handler_graphql/src/graphql_transformer_list_locations.json"
    ```

### Split operations with Router

???+ tip
    Read the **[considerations section for trade-offs between monolithic and micro functions](./api_gateway.md#considerations){target="_blank"}**, as it's also applicable here.

-As you grow the number of related GraphQL operations a given Lambda function should handle, it is natural to split them into separate files to ease maintenance - That's where the `Router` feature is useful.
+As you grow the number of related GraphQL operations a given Lambda function should handle, it is natural to split them into separate files to ease maintenance - That's when the `Router` feature comes in handy.

-Let's assume you have `app.py` as your Lambda function entrypoint and routes in `location.py`, this is how you'd use the `Router` feature.
+Let's assume you have `split_operation.py` as your Lambda function entrypoint and routes in `split_operation_module.py`. This is how you'd use the `Router` feature.

-=== "resolvers/location.py"
+=== "split_operation_module.py"

    We import **Router** instead of **AppSyncResolver**; syntax-wise it is exactly the same.
- ```python hl_lines="4 7 10 15" - from typing import Any, Dict, List - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.appsync import Router - - logger = Logger(child=True) - router = Router() - - - @router.resolver(type_name="Query", field_name="listLocations") - def list_locations(merchant_id: str) -> List[Dict[str, Any]]: - return [{"name": "Location name", "merchant_id": merchant_id}] - + ```python hl_lines="4 8 18-19" + --8<-- "examples/event_handler_graphql/src/split_operation_module.py" + ``` - @router.resolver(type_name="Location", field_name="status") - def resolve_status(merchant_id: str) -> str: - logger.debug(f"Resolve status for merchant_id: {merchant_id}") - return "FOO" - ``` +=== "split_operation.py" -=== "app.py" + We use `include_router` method and include all `location` operations registered in the `router` global object. - We use `include_router` method and include all `location` operations registered in the `router` global object. - - ```python hl_lines="8 13" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.logging.correlation_paths import APPSYNC_RESOLVER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from resolvers import location - - tracer = Tracer() - logger = Logger() - app = AppSyncResolver() - app.include_router(location.router) - - - @tracer.capture_lambda_handler - @logger.inject_lambda_context(correlation_id_path=APPSYNC_RESOLVER) - def lambda_handler(event: Dict, context: LambdaContext): - app.resolve(event, context) + ```python hl_lines="1 11" + --8<-- "examples/event_handler_graphql/src/split_operation.py" ``` ## Testing your code @@ -778,89 +234,43 @@ You can use either `app.resolve(event, context)` or simply `app(event, context)` Here's an example of how you can test your synchronous resolvers: -=== "test_resolver.py" - - ```python - import json - import pytest - from pathlib import Path +=== "assert_graphql_response.py" - from src.index import app # import the instance of AppSyncResolver from your code - - def test_direct_resolver(): - # Load mock event from a file - json_file_path = Path("appSyncDirectResolver.json") - with open(json_file_path) as json_file: - mock_event = json.load(json_file) - - # Call the implicit handler - result = app(mock_event, {}) - - assert result == "created this value" + ```python hl_lines="6 26 29" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response.py" ``` -=== "src/index.py" - - ```python - - from aws_lambda_powertools.event_handler import AppSyncResolver - - app = AppSyncResolver() - - @app.resolver(field_name="createSomething") - def create_something(): - return "created this value" +=== "assert_graphql_response_module.py" + ```python hl_lines="10" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response_module.py" ``` -=== "appSyncDirectResolver.json" +=== "assert_graphql_response.json" - ```json - --8<-- "tests/events/appSyncDirectResolver.json" + ```json hl_lines="5" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response.json" ``` -And an example for testing asynchronous resolvers. Note that this requires the `pytest-asyncio` package: - -=== "test_async_resolver.py" +And an example for testing asynchronous resolvers. Note that this requires the `pytest-asyncio` package. This tests a specific async GraphQL operation. 
-    ```python
-    import json
-    import pytest
-    from pathlib import Path
-
-    from src.index import app  # import the instance of AppSyncResolver from your code
-
-    @pytest.mark.asyncio
-    async def test_direct_resolver():
-        # Load mock event from a file
-        json_file_path = Path("appSyncDirectResolver.json")
-        with open(json_file_path) as json_file:
-            mock_event = json.load(json_file)

+???+ note
+    Alternatively, you can continue to call the `lambda_handler` function synchronously, as it runs `asyncio.run` to wait for the coroutine to complete.

-        # Call the implicit handler
-        result = await app(mock_event, {})
+=== "assert_async_graphql_response.py"

-        assert result == "created this value"
+    ```python hl_lines="27"
+    --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response.py"
    ```

-=== "src/index.py"
-
-    ```python
-    import asyncio
-
-    from aws_lambda_powertools.event_handler import AppSyncResolver
-
-    app = AppSyncResolver()
-
-    @app.resolver(field_name="createSomething")
-    async def create_something_async():
-        await asyncio.sleep(1)  # Do async stuff
-        return "created this value"
+=== "assert_async_graphql_response_module.py"

+    ```python hl_lines="14"
+    --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response_module.py"
    ```

-=== "appSyncDirectResolver.json"
+=== "assert_async_graphql_response.json"

-    ```json
-    --8<-- "tests/events/appSyncDirectResolver.json"
+    ```json hl_lines="3 4"
+    --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response.json"
    ```
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 23d57e251b9..c699568b349 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -14,6 +14,9 @@ Logger provides an opinionated logger with output structured as JSON.

## Getting started

+???+ tip
+    All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}.
+
Logger requires two settings:

| Setting | Description | Environment variable | Constructor parameter |
@@ -45,13 +48,13 @@ Your Logger will include the following keys to your structured logging:

You can enrich your structured logs with key Lambda context information via `inject_lambda_context`.

-=== "collect.py"
+=== "inject_lambda_context.py"

    ```python hl_lines="7"
    --8<-- "examples/logger/src/inject_lambda_context.py"
    ```

-=== "Example CloudWatch Logs excerpt"
+=== "inject_lambda_context_output.json"

    ```json hl_lines="8-12 17-20"
    --8<-- "examples/logger/src/inject_lambda_context_output.json"
    ```
@@ -85,19 +88,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME

???+ tip
    You can retrieve correlation IDs via `get_correlation_id` method

-=== "collect.py"
+=== "set_correlation_id.py"

    ```python hl_lines="7"
    --8<-- "examples/logger/src/set_correlation_id.py"
    ```

-=== "Example Event"
+=== "set_correlation_id_event.json"

    ```json hl_lines="3"
    --8<-- "examples/logger/src/set_correlation_id_event.json"
    ```

-=== "Example CloudWatch Logs excerpt"
+=== "set_correlation_id_output.json"

    ```json hl_lines="12"
    --8<-- "examples/logger/src/set_correlation_id_output.json"
    ```
@@ -107,18 +110,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME

You can also use the `set_correlation_id` method to inject it anywhere else in your code. Example below uses [Event Source Data Classes utility](../utilities/data_classes.md) to easily access event properties.
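In sketch form — assuming the renamed `set_correlation_id_method.py` example mirrors the previous inline version — this looks like:

```python
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent, event_source
from aws_lambda_powertools.utilities.typing import LambdaContext

logger = Logger()


@event_source(data_class=APIGatewayProxyEvent)
def lambda_handler(event: APIGatewayProxyEvent, context: LambdaContext):
    # The data class gives typed access to the event;
    # request_context carries the API Gateway request ID
    logger.set_correlation_id(event.request_context.request_id)
    logger.info("Collecting payment")
    return {"statusCode": 200}
```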
-=== "collect.py" +=== "set_correlation_id_method.py" ```python hl_lines="11" --8<-- "examples/logger/src/set_correlation_id_method.py" ``` -=== "Example Event" + +=== "set_correlation_id_method.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_method_event.json" + --8<-- "examples/logger/src/set_correlation_id_method.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_method_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/set_correlation_id_method_output.json" @@ -128,19 +132,19 @@ You can also use `set_correlation_id` method to inject it anywhere else in your To ease routine tasks like extracting correlation ID from popular event sources, we provide [built-in JMESPath expressions](#built-in-correlation-id-expressions). -=== "collect.py" +=== "set_correlation_id_jmespath.py" ```python hl_lines="2 8" --8<-- "examples/logger/src/set_correlation_id_jmespath.py" ``` -=== "Example Event" +=== "set_correlation_id_jmespath.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_jmespath_event.json" + --8<-- "examples/logger/src/set_correlation_id_jmespath.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_jmespath_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/set_correlation_id_jmespath_output.json" @@ -163,12 +167,13 @@ You can append additional keys using either mechanism: You can append your own keys to your existing Logger via `append_keys(**additional_key_values)` method. -=== "collect.py" +=== "append_keys.py" ```python hl_lines="12" --8<-- "examples/logger/src/append_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_output.json" @@ -188,12 +193,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th ???+ info Any keyword argument added using `extra` will not be persisted for subsequent messages. -=== "extra_parameter.py" +=== "append_keys_extra.py" ```python hl_lines="9" --8<-- "examples/logger/src/append_keys_extra.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_extra_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_extra_output.json" @@ -203,13 +209,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th You can remove any additional key from Logger state using `remove_keys`. -=== "collect.py" +=== "remove_keys.py" ```python hl_lines="11" --8<-- "examples/logger/src/remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "remove_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/remove_keys_output.json" @@ -229,19 +235,19 @@ Logger is commonly initialized in the global scope. Due to [Lambda Execution Con You can either avoid running any code as part of Lambda Layers global scope, or override keys with their latest value as part of handler's execution. -=== "collect.py" +=== "clear_state.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/clear_state.py" ``` -=== "#1 request" +=== "clear_state_event_one.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_one.json" ``` -=== "#2 request" +=== "clear_state_event_two.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_two.json" @@ -254,13 +260,13 @@ Use `logger.exception` method to log contextual information about exceptions. 
Lo ???+ tip You can use your preferred Log Analytics tool to enumerate and visualize exceptions across all your services using `exception_name` key. -=== "collect.py" +=== "logging_exceptions.py" ```python hl_lines="15" --8<-- "examples/logger/src/logging_exceptions.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logging_exceptions_output.json" ```json hl_lines="7-8" --8<-- "examples/logger/src/logging_exceptions_output.json" @@ -289,19 +295,19 @@ Similar to [Tracer](./tracer.md#reusing-tracer-across-your-code), a new instance Notice in the CloudWatch Logs output how `payment_id` appeared as expected when logging in `collect.py`. -=== "collect.py" +=== "logger_reuse.py" ```python hl_lines="1 9 11 12" --8<-- "examples/logger/src/logger_reuse.py" ``` -=== "payment.py" +=== "logger_reuse_payment.py" ```python hl_lines="3 7" --8<-- "examples/logger/src/logger_reuse_payment.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logger_reuse_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/logger_reuse_output.json" @@ -310,7 +316,7 @@ Notice in the CloudWatch Logs output how `payment_id` appeared as expected when ???+ note "Note: About Child Loggers" Coming from standard library, you might be used to use `logging.getLogger(__name__)`. This will create a new instance of a Logger with a different name. - In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directonally between Child and Parent. + In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directionally between Child and Parent. For that reason, there could be side effects depending on the order the Child Logger is instantiated, because Child Loggers don't have a handler. @@ -334,15 +340,15 @@ Sampling decision happens at the Logger initialization. 
This means sampling may ???+ note Open a [feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=) if you want Logger to calculate sampling for every invocation -=== "collect.py" +=== "sampling_debug_logs.py" ```python hl_lines="6 10" - --8<-- "examples/logger/src/logger_reuse.py" + --8<-- "examples/logger/src/sampling_debug_logs.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "sampling_debug_logs_output.json" - ```json hl_lines="3 5 13 16 25" + ```json hl_lines="3 5 13 16 26" --8<-- "examples/logger/src/sampling_debug_logs_output.json" ``` @@ -390,13 +396,13 @@ For child Loggers, we introspect the name of your module where `Logger(child=Tru ???+ danger A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger: -=== "incorrect_logger_inheritance.py" +=== "logging_inheritance_bad.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_bad.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -409,13 +415,13 @@ In this case, Logger will register a Logger named `payment`, and a Logger named Do this instead: -=== "correct_logger_inheritance.py" +=== "logging_inheritance_good.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_good.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -432,13 +438,13 @@ You might want to continue to use the same date formatting style, or override `l Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`. -=== "lambda_handler.py" +=== "overriding_log_records.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/overriding_log_records.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "overriding_log_records_output.json" ```json hl_lines="3 5" --8<-- "examples/logger/src/overriding_log_records_output.json" @@ -448,12 +454,13 @@ Logger allows you to either change the format or suppress the following keys alt You can change the order of [standard Logger keys](#standard-structured-keys) or any keys that will be appended later at runtime via the `log_record_order` parameter. -=== "app.py" +=== "reordering_log_keys.py" ```python hl_lines="5 8" --8<-- "examples/logger/src/reordering_log_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "reordering_log_keys_output.json" ```json hl_lines="3 10" --8<-- "examples/logger/src/reordering_log_keys_output.json" @@ -463,13 +470,13 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or By default, this Logger and standard logging library emits records using local time timestamp. You can override this behavior via `utc` parameter: -=== "app.py" +=== "setting_utc_timestamp.py" ```python hl_lines="6" --8<-- "examples/logger/src/setting_utc_timestamp.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "setting_utc_timestamp_output.json" ```json hl_lines="6 13" --8<-- "examples/logger/src/setting_utc_timestamp_output.json" @@ -479,13 +486,13 @@ By default, this Logger and standard logging library emits records using local t By default, Logger uses `str` to handle values non-serializable by JSON. 
You can override this behavior via `json_default` parameter by passing a Callable: -=== "app.py" +=== "unserializable_values.py" ```python hl_lines="6 17" --8<-- "examples/logger/src/unserializable_values.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "unserializable_values_output.json" ```json hl_lines="4-6" --8<-- "examples/logger/src/unserializable_values_output.json" @@ -508,13 +515,13 @@ By default, Logger uses [LambdaPowertoolsFormatter](#lambdapowertoolsformatter) For these, you can override the `serialize` method from [LambdaPowertoolsFormatter](#lambdapowertoolsformatter). -=== "custom_formatter.py" +=== "bring_your_own_formatter.py" ```python hl_lines="2 5-6 12" --8<-- "examples/logger/src/bring_your_own_formatter.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_output.json" ```json hl_lines="6" --8<-- "examples/logger/src/bring_your_own_formatter_output.json" ``` @@ -526,13 +533,13 @@ For exceptional cases where you want to completely replace our formatter logic, ???+ warning You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools Logger provides. This also means keeping state of logging keys added. -=== "collect.py" +=== "bring_your_own_formatter_from_scratch.py" ```python hl_lines="6 9 11-12 15 19 23 26 38" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_from_scratch_output.json" ```json hl_lines="2-4" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch_output.json" @@ -612,15 +619,16 @@ You can include any of these logging attributes as key value arguments (`kwargs` You can also add them later anywhere in your code with `append_keys`, or remove them with `remove_keys` methods. -=== "collect.py" +=== "append_and_remove_keys.py" ```python hl_lines="3 8 10" ---8<-- "examples/logger/src/append_and_remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_and_remove_keys_output.json" ```json hl_lines="6 15-16" - ---8<-- "examples/logger/src/append_and_remove_keys.json" + ---8<-- "examples/logger/src/append_and_remove_keys_output.json" ``` For log records originating from Powertools Logger, the `name` attribute will be the same as `service`, for log records coming from standard library logger, it will be the name of the logger (i.e. what was used as name argument to `logging.getLogger`). @@ -631,13 +639,13 @@ Keys added with `append_keys` will persist across multiple log messages while ke Here's an example where we persist `payment_id` not `request_id`. Note that `payment_id` remains in both log messages while `booking_id` is only available in the first message. -=== "collect.py" +=== "append_keys_vs_extra.py" ```python hl_lines="16 23" ---8<-- "examples/logger/src/append_keys_vs_extra.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "append_keys_vs_extra_output.json" ```json hl_lines="9-10 19" ---8<-- "examples/logger/src/append_keys_vs_extra_output.json" diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 24a8f1e6fda..843e35b7eb8 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -28,6 +28,9 @@ If you're new to Amazon CloudWatch, there are two terminologies you must be awar ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. 
+ Metric has two global settings that will be used across all metrics emitted: | Setting | Description | Environment variable | Constructor parameter | @@ -54,13 +57,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all ???+ tip You can initialize Metrics in any other module too. It'll keep track of your aggregate metrics in memory to optimize costs (one blob instead of multiples). -=== "Metrics" +=== "add_metrics.py" ```python hl_lines="10" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Metrics with custom dimensions" +=== "add_dimension.py" ```python hl_lines="13" --8<-- "examples/metrics/src/add_dimension.py" @@ -79,13 +82,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all You can call `add_metric()` with the same metric name multiple times. The values will be grouped together in a list. -=== "Metrics" +=== "add_multi_value_metrics.py" ```python hl_lines="14-15" --8<-- "examples/metrics/src/add_multi_value_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_multi_value_metrics_output.json" ```python hl_lines="15 24-26" --8<-- "examples/metrics/src/add_multi_value_metrics_output.json" @@ -97,13 +100,13 @@ You can use `set_default_dimensions` method, or `default_dimensions` parameter i If you'd like to remove them at some point, you can use `clear_default_dimensions` method. -=== "set_default_dimensions method" +=== "set_default_dimensions.py" ```python hl_lines="9" --8<-- "examples/metrics/src/set_default_dimensions.py" ``` -=== "with log_metrics decorator" +=== "set_default_dimensions_log_metrics.py" ```python hl_lines="9 13" --8<-- "examples/metrics/src/set_default_dimensions_log_metrics.py" @@ -115,13 +118,13 @@ As you finish adding all your metrics, you need to serialize and flush them to s This decorator also **validates**, **serializes**, and **flushes** all your metrics. During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised. -=== "app.py" +=== "add_metrics.py" ```python hl_lines="8" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "log_metrics_output.json" ```json hl_lines="6 9 14 21-23" --8<-- "examples/metrics/src/log_metrics_output.json" @@ -149,13 +152,13 @@ If you want to ensure at least one metric is always emitted, you can pass `raise You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param. -=== "app.py" +=== "capture_cold_start_metric.py" ```python hl_lines="7" --8<-- "examples/metrics/src/capture_cold_start_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "capture_cold_start_metric_output.json" ```json hl_lines="9 15 22 24-25" --8<-- "examples/metrics/src/capture_cold_start_metric_output.json" @@ -180,13 +183,13 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata ???+ info **This will not be available during metrics visualization** - Use **dimensions** for this purpose -=== "app.py" +=== "add_metadata.py" ```python hl_lines="14" --8<-- "examples/metrics/src/add_metadata.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_metadata_output.json" ```json hl_lines="22" --8<-- "examples/metrics/src/add_metadata_output.json" @@ -201,13 +204,13 @@ CloudWatch EMF uses the same dimensions across all your metrics. 
Use `single_met **unique metric = (metric_name + dimension_name + dimension_value)** -=== "app.py" +=== "single_metric.py" ```python hl_lines="11" --8<-- "examples/metrics/src/single_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "single_metric_output.json" ```json hl_lines="15" --8<-- "examples/metrics/src/single_metric_output.json" @@ -254,7 +257,7 @@ Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` be You can read standard output and assert whether metrics have been flushed. Here's an example using `pytest` with `capsys` built-in fixture: -=== "Asserting single EMF blob" +=== "assert_single_emf_blob.py" ```python hl_lines="6 9-10 23-34" --8<-- "examples/metrics/src/assert_single_emf_blob.py" @@ -266,7 +269,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Asserting multiple EMF blobs" +=== "assert_multiple_emf_blobs.py" This will be needed when using `capture_cold_start_metric=True`, or when both `Metrics` and `single_metric` are used. @@ -274,7 +277,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/assert_multiple_emf_blobs.py" ``` -=== "my_other_module.py" +=== "assert_multiple_emf_blobs_module.py" ```python --8<-- "examples/metrics/src/assert_multiple_emf_blobs_module.py" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index c8037eff241..8fbfc0e29f7 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -16,6 +16,9 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. + ### Permissions Before your use this utility, your AWS Lambda function [must have permissions](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html#services-xray-permissions) to send traces to AWS X-Ray. @@ -74,19 +77,19 @@ You can trace synchronous functions using the `capture_method` decorator. You can trace asynchronous functions and generator functions (including context managers) using `capture_method`. -=== "Async" +=== "capture_method_async.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_async.py" ``` -=== "Context manager" +=== "capture_method_context_manager.py" ```python hl_lines="12-13" --8<-- "examples/tracer/src/capture_method_context_manager.py" ``` -=== "Generators" +=== "capture_method_generators.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_generators.py" @@ -113,13 +116,13 @@ Use **`capture_response=False`** parameter in both `capture_lambda_handler` and 2. You might manipulate **streaming objects that can be read only once**; this prevents subsequent calls from being empty 3. You might return **more than 64K** of data _e.g., `message too long` error_ -=== "sensitive_data_scenario.py" +=== "disable_capture_response.py" ```python hl_lines="8 15" --8<-- "examples/tracer/src/disable_capture_response.py" ``` -=== "streaming_object_scenario.py" +=== "disable_capture_response_streaming_body.py" ```python hl_lines="19" --8<-- "examples/tracer/src/disable_capture_response_streaming_body.py" @@ -189,17 +192,17 @@ Tracer keeps a copy of its configuration after the first initialization. This is Tracer will automatically ignore imported modules that have been patched. 
-=== "handler.py" +=== "tracer_reuse.py" ```python hl_lines="1 6" --8<-- "examples/tracer/src/tracer_reuse.py" ``` -=== "tracer_reuse_payment.py" +=== "tracer_reuse_module.py" A new instance of Tracer will be created but will reuse the previous Tracer instance configuration, similar to a Singleton. ```python hl_lines="3" - --8<-- "examples/tracer/src/tracer_reuse_payment.py" + --8<-- "examples/tracer/src/tracer_reuse_module.py" ``` ## Testing your code diff --git a/docs/index.md b/docs/index.md index cc3d437334e..6467dcdeb35 100644 --- a/docs/index.md +++ b/docs/index.md @@ -7,15 +7,14 @@ description: AWS Lambda Powertools Python A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, idempotency, batching, and more. -???+ tip "Tip: Looking for a quick read through how the core features are used?" - - Check out [this detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/) with a practical example. +???+ note + Lambda Powertools is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"} and [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}. ## Install Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21**](#){: .copyMe}:clipboard: +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** ???+ hint "Support this project by using Lambda Layers :heart:" @@ -33,23 +32,23 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN | | ---------------- | -------------------------------------------------------------------------------------------------------- | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-2` | 
[arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | ??? question "Can't find our Lambda Layer for your preferred AWS region?" You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library. 
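Since every regional ARN in the table differs only by region, a small helper can assemble it; the account ID, layer name, and version below are taken from the table above:

```python
LAYER_ACCOUNT_ID = "017000801446"
LAYER_NAME = "AWSLambdaPowertoolsPython"
LAYER_VERSION = 22


def powertools_layer_arn(region: str) -> str:
    """Build the Lambda Powertools layer ARN for a given AWS region."""
    return f"arn:aws:lambda:{region}:{LAYER_ACCOUNT_ID}:layer:{LAYER_NAME}:{LAYER_VERSION}"


assert (
    powertools_layer_arn("eu-west-1")
    == "arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22"
)
```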
@@ -63,7 +62,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:21 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:22 ``` === "Serverless framework" @@ -73,7 +72,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:21 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:22 ``` === "CDK" @@ -89,7 +88,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:21" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:22" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -138,7 +137,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -157,7 +156,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22 ❯ amplify push -y @@ -168,7 +167,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22 ? Do you want to edit the local lambda function now? No ``` @@ -176,7 +175,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. 
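To go from that `Location` key to a local copy of the Layer in one step, a minimal sketch (assumptions: `curl` is available, and `eu-west-1` stands in for your region):

```bash
# Query only the pre-signed URL (Content.Location) and download the Layer archive
aws lambda get-layer-version-by-arn \
  --arn arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22 \
  --region eu-west-1 \
  --query 'Content.Location' --output text | xargs curl -o powertools-layer.zip
```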
@@ -214,7 +213,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Properties: Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - SemanticVersion: 1.25.10 # change to latest semantic version available in SAR + SemanticVersion: 1.26.3 # change to latest semantic version available in SAR MyLambdaFunction: Type: AWS::Serverless::Function @@ -242,7 +241,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - SemanticVersion: 1.25.10 + SemanticVersion: 1.26.3 ``` === "CDK" @@ -252,7 +251,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, POWERTOOLS_BASE_NAME = 'AWSLambdaPowertools' # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - POWERTOOLS_VER = '1.23.0' + POWERTOOLS_VER = '1.26.3' POWERTOOLS_ARN = 'arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer' class SampleApp(core.Construct): @@ -316,7 +315,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, variable "aws_powertools_version" { type = string - default = "1.20.2" + default = "1.26.3" description = "The AWS Powertools release version" } diff --git a/examples/event_handler_graphql/sam/template.yaml b/examples/event_handler_graphql/sam/template.yaml new file mode 100644 index 00000000000..3e2ab60ab10 --- /dev/null +++ b/examples/event_handler_graphql/sam/template.yaml @@ -0,0 +1,124 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Hello world Direct Lambda Resolver + +Globals: + Function: + Timeout: 5 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: example + +Resources: + TodosFunction: + Type: AWS::Serverless::Function + Properties: + Handler: getting_started_graphql_api_resolver.lambda_handler + CodeUri: ../src + Description: Sample Direct Lambda Resolver + + # IAM Permissions and Roles + + AppSyncServiceRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "appsync.amazonaws.com" + Action: + - "sts:AssumeRole" + + InvokeLambdaResolverPolicy: + Type: "AWS::IAM::Policy" + Properties: + PolicyName: "DirectAppSyncLambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Action: "lambda:invokeFunction" + Resource: + - !GetAtt TodosFunction.Arn + Roles: + - !Ref AppSyncServiceRole + + # GraphQL API + + TodosApi: + Type: "AWS::AppSync::GraphQLApi" + Properties: + Name: TodosApi + AuthenticationType: "API_KEY" + XrayEnabled: true + + TodosApiKey: + Type: AWS::AppSync::ApiKey + Properties: + ApiId: !GetAtt TodosApi.ApiId + + TodosApiSchema: + Type: "AWS::AppSync::GraphQLSchema" + Properties: + ApiId: !GetAtt TodosApi.ApiId + Definition: | + schema { + query:Query + } + + type Query { + getTodo(id: ID!): Todo + listTodos: [Todo] + } + + type Todo { + id: ID! 
+          userId: String
+          title: String
+          completed: Boolean
+        }
+
+  # Lambda Direct Data Source and Resolver
+
+  TodosFunctionDataSource:
+    Type: "AWS::AppSync::DataSource"
+    Properties:
+      ApiId: !GetAtt TodosApi.ApiId
+      Name: "HelloWorldLambdaDirectResolver"
+      Type: "AWS_LAMBDA"
+      ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn
+      LambdaConfig:
+        LambdaFunctionArn: !GetAtt TodosFunction.Arn
+
+  ListTodosResolver:
+    Type: "AWS::AppSync::Resolver"
+    Properties:
+      ApiId: !GetAtt TodosApi.ApiId
+      TypeName: "Query"
+      FieldName: "listTodos"
+      DataSourceName: !GetAtt TodosFunctionDataSource.Name
+
+  GetTodoResolver:
+    Type: "AWS::AppSync::Resolver"
+    Properties:
+      ApiId: !GetAtt TodosApi.ApiId
+      TypeName: "Query"
+      FieldName: "getTodo"
+      DataSourceName: !GetAtt TodosFunctionDataSource.Name
+
+Outputs:
+  TodosFunction:
+    Description: "Hello World Lambda Function ARN"
+    Value: !GetAtt TodosFunction.Arn
+
+  TodosApi:
+    Value: !GetAtt TodosApi.Arn
diff --git a/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql b/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql
new file mode 100644
index 00000000000..0bd6949cb91
--- /dev/null
+++ b/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql
@@ -0,0 +1,23 @@
+@model
+type Merchant {
+  id: String!
+  name: String!
+  description: String
+  # Resolves to `common_field`
+  commonField: String @function(name: "merchantInfo-${env}")
+}
+
+type Location {
+  id: ID!
+  name: String!
+  address: String
+  # Resolves to `common_field`
+  commonField: String @function(name: "merchantInfo-${env}")
+}
+
+type Query {
+  # List of locations resolves to `list_locations`
+  listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}")
+  # Merchant search resolves to `find_merchant`
+  findMerchant(search: String): [Merchant] @function(name: "searchMerchant-${env}")
+}
diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response.json b/examples/event_handler_graphql/src/assert_async_graphql_response.json
new file mode 100644
index 00000000000..e22d4e741cd
--- /dev/null
+++ b/examples/event_handler_graphql/src/assert_async_graphql_response.json
@@ -0,0 +1,43 @@
+{
+  "typeName": "Query",
+  "fieldName": "listTodos",
+  "arguments": {},
+  "selectionSetList": [
+    "id",
+    "userId",
+    "completed"
+  ],
+  "identity": {
+    "claims": {
+      "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9",
+      "email_verified": true,
+      "iss": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx",
+      "phone_number_verified": false,
+      "cognito:username": "jdoe",
+      "aud": "7471s60os7h0uu77i1tk27sp9n",
+      "event_id": "bc334ed8-a938-4474-b644-9547e304e606",
+      "token_use": "id",
+      "auth_time": 1599154213,
+      "phone_number": "+19999999999",
+      "exp": 1599157813,
+      "iat": 1599154213,
+      "email": "jdoe@email.com"
+    },
+    "defaultAuthStrategy": "ALLOW",
+    "groups": null,
+    "issuer": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx",
+    "sourceIp": [
+      "1.1.1.1"
+    ],
+    "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9",
+    "username": "jdoe"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1",
+      "cloudfront-viewer-country": "NL",
+      "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response.py b/examples/event_handler_graphql/src/assert_async_graphql_response.py
new file mode 100644
index 00000000000..22eceb1c5d0
--- /dev/null
+++ b/examples/event_handler_graphql/src/assert_async_graphql_response.py
@@ -0,0 +1,34 @@
+import json
+from dataclasses import dataclass
+from pathlib import Path
+
+import pytest
+from assert_async_graphql_response_module import Todo, app  # instance of AppSyncResolver
+
+
+@pytest.fixture
+def lambda_context():
+    @dataclass
+    class LambdaContext:
+        function_name: str = "test"
+        memory_limit_in_mb: int = 128
+        invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test"
+        aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc"
+
+    return LambdaContext()
+
+
+@pytest.mark.asyncio
+async def test_async_direct_resolver(lambda_context):
+    # GIVEN
+    fake_event = json.loads(Path("assert_async_graphql_response.json").read_text())
+
+    # WHEN
+    result: list[Todo] = await app(fake_event, lambda_context)
+    # alternatively, you can also run a sync test against `lambda_handler`
+    # since `lambda_handler` awaits the coroutine to complete
+
+    # THEN
+    assert result[0]["userId"] == 1
+    assert result[0]["id"] == 1
+    assert result[0]["completed"] is False
diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response_module.py b/examples/event_handler_graphql/src/assert_async_graphql_response_module.py
new file mode 100644
index 00000000000..892da71fb0f
--- /dev/null
+++ b/examples/event_handler_graphql/src/assert_async_graphql_response_module.py
@@ -0,0 +1,37 @@
+import asyncio
+from typing import TypedDict
+
+import aiohttp
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.tracing import aiohttp_trace_config
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Todo(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    userId: str
+    title: str
+    completed: bool
+
+
+@app.resolver(type_name="Query", field_name="listTodos")
+async def list_todos() -> list[Todo]:
+    async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session:
+        async with session.get("https://jsonplaceholder.typicode.com/todos") as resp:
+            # await the response body before slicing to the first two results for the assertion demo
+            return (await resp.json())[:2]
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    result = app.resolve(event, context)
+
+    return asyncio.run(result)
diff --git a/examples/event_handler_graphql/src/assert_graphql_response.json b/examples/event_handler_graphql/src/assert_graphql_response.json
new file mode 100644
index 00000000000..7d5fe1be12e
--- /dev/null
+++ b/examples/event_handler_graphql/src/assert_graphql_response.json
@@ -0,0 +1,45 @@
+{
+  "typeName": "Query",
+  "fieldName": "listLocations",
+  "arguments": {
+    "name": "Perkins-Reed",
+    "description": "Nulla sed amet. Earum libero qui sunt perspiciatis. Non aliquid accusamus."
+ }, + "selectionSetList": [ + "id", + "name" + ], + "identity": { + "claims": { + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "email_verified": true, + "iss": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "phone_number_verified": false, + "cognito:username": "jdoe", + "aud": "7471s60os7h0uu77i1tk27sp9n", + "event_id": "bc334ed8-a938-4474-b644-9547e304e606", + "token_use": "id", + "auth_time": 1599154213, + "phone_number": "+19999999999", + "exp": 1599157813, + "iat": 1599154213, + "email": "jdoe@email.com" + }, + "defaultAuthStrategy": "ALLOW", + "groups": null, + "issuer": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "sourceIp": [ + "1.1.1.1" + ], + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "username": "jdoe" + }, + "request": { + "headers": { + "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", + "x-forwarded-for": "127.0.0.1", + "cloudfront-viewer-country": "NL", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq" + } + } +} \ No newline at end of file diff --git a/examples/event_handler_graphql/src/assert_graphql_response.py b/examples/event_handler_graphql/src/assert_graphql_response.py new file mode 100644 index 00000000000..548aece15e0 --- /dev/null +++ b/examples/event_handler_graphql/src/assert_graphql_response.py @@ -0,0 +1,29 @@ +import json +from dataclasses import dataclass +from pathlib import Path + +import pytest +from assert_graphql_response_module import Location, app # instance of AppSyncResolver + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test" + aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc" + + return LambdaContext() + + +def test_direct_resolver(lambda_context): + # GIVEN + fake_event = json.loads(Path("assert_graphql_response.json").read_text()) + + # WHEN + result: list[Location] = app(fake_event, lambda_context) + + # THEN + assert result[0]["name"] == "Perkins-Reed" diff --git a/examples/event_handler_graphql/src/assert_graphql_response_module.py b/examples/event_handler_graphql/src/assert_graphql_response_module.py new file mode 100644 index 00000000000..2f9c8ac3c41 --- /dev/null +++ b/examples/event_handler_graphql/src/assert_graphql_response_module.py @@ -0,0 +1,30 @@ +from typing import TypedDict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Location(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + name: str + description: str + address: str + + +@app.resolver(field_name="listLocations") +@app.resolver(field_name="locations") +@tracer.capture_method +def get_locations(name: str, description: str = "") -> list[Location]: # match GraphQL Query arguments + return [{"name": name, "description": description}] + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/async_resolvers.py b/examples/event_handler_graphql/src/async_resolvers.py new file mode 100644 index 00000000000..229e015c886 
--- /dev/null
+++ b/examples/event_handler_graphql/src/async_resolvers.py
@@ -0,0 +1,36 @@
+import asyncio
+from typing import TypedDict
+
+import aiohttp
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.tracing import aiohttp_trace_config
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Todo(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    userId: str
+    title: str
+    completed: bool
+
+
+@app.resolver(type_name="Query", field_name="listTodos")
+async def list_todos() -> list[Todo]:
+    async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session:
+        async with session.get("https://jsonplaceholder.typicode.com/todos") as resp:
+            return await resp.json()
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    result = app.resolve(event, context)
+
+    return asyncio.run(result)
diff --git a/examples/event_handler_graphql/src/custom_models.py b/examples/event_handler_graphql/src/custom_models.py
new file mode 100644
index 00000000000..92763ca3401
--- /dev/null
+++ b/examples/event_handler_graphql/src/custom_models.py
@@ -0,0 +1,43 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Location(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    address: str
+    commonField: str
+
+
+class MyCustomModel(AppSyncResolverEvent):
+    @property
+    def country_viewer(self) -> str:
+        return self.get_header_value(name="cloudfront-viewer-country", default_value="", case_sensitive=False)
+
+    @property
+    def api_key(self) -> str:
+        return self.get_header_value(name="x-api-key", default_value="", case_sensitive=False)
+
+
+@app.resolver(type_name="Query", field_name="listLocations")
+def list_locations(page: int = 0, size: int = 10) -> list[Location]:
+    # additional properties/methods will now be available under current_event
+    logger.debug(f"Request country origin: {app.current_event.country_viewer}")
+    return [{"id": scalar_types_utils.make_id(), "name": "Perry, James and Carroll"}]
+
+
+@tracer.capture_lambda_handler
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context, data_model=MyCustomModel)
diff --git a/examples/event_handler_graphql/src/getting_started_get_todo.json b/examples/event_handler_graphql/src/getting_started_get_todo.json
new file mode 100644
index 00000000000..6cbf15ba36c
--- /dev/null
+++ b/examples/event_handler_graphql/src/getting_started_get_todo.json
@@ -0,0 +1,46 @@
+{
+  "arguments": {
+    "id": "7e362732-c8cd-4405-b090-144ac9b38960"
+  },
+  "identity": null,
+  "source": null,
+
"request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": "false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "title", + "id" + ], + "selectionSetGraphQL": "{\n title\n id\n}", + "fieldName": "getTodo", + "variables": {} + }, + "stash": {} +} \ No newline at end of file diff --git a/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py b/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py new file mode 100644 index 00000000000..4e42bd42f58 --- /dev/null +++ b/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py @@ -0,0 +1,48 @@ +from typing import TypedDict + +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Todo(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + userId: str + title: str + completed: bool + + +@app.resolver(type_name="Query", field_name="getTodo") +@tracer.capture_method +def get_todo( + id: str = "", # noqa AA03 VNE003 shadows built-in id to match query argument, e.g., getTodo(id: "some_id") +) -> Todo: + logger.info(f"Fetching Todo {id}") + todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{id}") + todos.raise_for_status() + + return todos.json() + + +@app.resolver(type_name="Query", field_name="listTodos") +@tracer.capture_method +def list_todos() -> list[Todo]: + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return todos.json()[:10] + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/getting_started_list_todos.json b/examples/event_handler_graphql/src/getting_started_list_todos.json new file mode 100644 index 00000000000..5be5094cf94 --- /dev/null +++ b/examples/event_handler_graphql/src/getting_started_list_todos.json @@ -0,0 +1,44 @@ +{ + "arguments": {}, + 
"identity": null, + "source": null, + "request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": "false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "id", + "title" + ], + "selectionSetGraphQL": "{\n id\n title\n}", + "fieldName": "listTodos", + "variables": {} + }, + "stash": {} +} \ No newline at end of file diff --git a/docs/shared/getting_started_schema.graphql b/examples/event_handler_graphql/src/getting_started_schema.graphql similarity index 76% rename from docs/shared/getting_started_schema.graphql rename to examples/event_handler_graphql/src/getting_started_schema.graphql index c738156bd73..b8ef8f995d0 100644 --- a/docs/shared/getting_started_schema.graphql +++ b/examples/event_handler_graphql/src/getting_started_schema.graphql @@ -9,7 +9,7 @@ type Query { type Todo { id: ID! 
+  userId: String
   title: String
-  description: String
-  done: Boolean
+  completed: Boolean
 }
diff --git a/examples/event_handler_graphql/src/graphql_transformer_common_field.json b/examples/event_handler_graphql/src/graphql_transformer_common_field.json
new file mode 100644
index 00000000000..6b8b47b8172
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_common_field.json
@@ -0,0 +1,17 @@
+{
+  "typeName": "Merchant",
+  "fieldName": "commonField",
+  "arguments": {},
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "marieellis"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json b/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json
new file mode 100644
index 00000000000..8186ebc110e
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json
@@ -0,0 +1,19 @@
+{
+  "typeName": "Query",
+  "fieldName": "findMerchant",
+  "arguments": {
+    "search": "Parry-Wood"
+  },
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "wwilliams"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_list_locations.json b/examples/event_handler_graphql/src/graphql_transformer_list_locations.json
new file mode 100644
index 00000000000..b8f24aa70b6
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_list_locations.json
@@ -0,0 +1,22 @@
+{
+  "typeName": "Query",
+  "fieldName": "listLocations",
+  "arguments": {
+    "page": 2,
+    "size": 1
+  },
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "treid"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1",
+      "cloudfront-viewer-country": "NL",
+      "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py b/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py
new file mode 100644
index 00000000000..272f119f3b8
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py
@@ -0,0 +1,36 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Location(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    address: str
+    commonField: str
+
+
+@app.resolver(type_name="Query", field_name="listLocations")
+def list_locations(page: int = 0, size: int = 10) -> list[Location]:
+    return [{"id": scalar_types_utils.make_id(), "name": "Smooth Grooves"}]
+
+
+@app.resolver(field_name="commonField")
+def common_field() -> str:
+    # Would match all fieldNames matching 'commonField'
+    return scalar_types_utils.make_id()
+
+
+@tracer.capture_lambda_handler
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py b/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py
new file mode 100644
index 00000000000..e2adb566f93
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py
@@ -0,0 +1,42 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+app = AppSyncResolver()
+tracer = Tracer()
+logger = Logger()
+
+
+class Merchant(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    commonField: str
+
+
+@app.resolver(type_name="Query", field_name="findMerchant")
+def find_merchant(search: str) -> list[Merchant]:
+    merchants: list[Merchant] = [
+        {
+            "id": scalar_types_utils.make_id(),
+            "name": "Parry-Wood",
+            "description": "Possimus doloremque tempora harum deleniti eum.",
+        },
+        {
+            "id": scalar_types_utils.make_id(),
+            "name": "Shaw, Owen and Jones",
+            "description": "Aliquam iste architecto suscipit in.",
+        },
+    ]
+
+    return [merchant for merchant in merchants if search == merchant["name"]]
+
+
+@tracer.capture_lambda_handler
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/nested_mappings.py b/examples/event_handler_graphql/src/nested_mappings.py
new file mode 100644
index 00000000000..2f9c8ac3c41
--- /dev/null
+++ b/examples/event_handler_graphql/src/nested_mappings.py
@@ -0,0 +1,30 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Location(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    address: str
+
+
+@app.resolver(field_name="listLocations")
+@app.resolver(field_name="locations")
+@tracer.capture_method
+def get_locations(name: str, description: str = "") -> list[Location]:  # match GraphQL Query arguments
+    return [{"name": name, "description": description}]
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/nested_mappings_schema.graphql b/examples/event_handler_graphql/src/nested_mappings_schema.graphql
new file mode 100644
index 00000000000..23a9ae468b1
--- /dev/null
+++ b/examples/event_handler_graphql/src/nested_mappings_schema.graphql
@@ -0,0 +1,21 @@
+schema {
+  query: Query
+}
+
+type Query {
+  listLocations: [Location]
+}
+
+type Location {
+  id: ID!
+  name: String!
+ description: String + address: String +} + +type Merchant { + id: String! + name: String! + description: String + locations: [Location] +} diff --git a/examples/event_handler_graphql/src/scalar_functions.py b/examples/event_handler_graphql/src/scalar_functions.py new file mode 100644 index 00000000000..0d8fa98b7b3 --- /dev/null +++ b/examples/event_handler_graphql/src/scalar_functions.py @@ -0,0 +1,15 @@ +from aws_lambda_powertools.utilities.data_classes.appsync.scalar_types_utils import ( + aws_date, + aws_datetime, + aws_time, + aws_timestamp, + make_id, +) + +# Scalars: https://docs.aws.amazon.com/appsync/latest/devguide/scalars.html + +_: str = make_id() # Scalar: ID! +_: str = aws_date() # Scalar: AWSDate +_: str = aws_time() # Scalar: AWSTime +_: str = aws_datetime() # Scalar: AWSDateTime +_: int = aws_timestamp() # Scalar: AWSTimestamp diff --git a/examples/event_handler_graphql/src/split_operation.py b/examples/event_handler_graphql/src/split_operation.py new file mode 100644 index 00000000000..5704181d78c --- /dev/null +++ b/examples/event_handler_graphql/src/split_operation.py @@ -0,0 +1,17 @@ +import split_operation_module + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() +app.include_router(split_operation_module.router) + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/split_operation_module.py b/examples/event_handler_graphql/src/split_operation_module.py new file mode 100644 index 00000000000..43c413672b6 --- /dev/null +++ b/examples/event_handler_graphql/src/split_operation_module.py @@ -0,0 +1,22 @@ +from typing import TypedDict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler.appsync import Router + +tracer = Tracer() +logger = Logger() +router = Router() + + +class Location(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + name: str + description: str + address: str + + +@router.resolver(field_name="listLocations") +@router.resolver(field_name="locations") +@tracer.capture_method +def get_locations(name: str, description: str = "") -> list[Location]: # match GraphQL Query arguments + return [{"name": name, "description": description}] diff --git a/examples/event_handler_rest/sam/template.yaml b/examples/event_handler_rest/sam/template.yaml new file mode 100644 index 00000000000..513e6196f13 --- /dev/null +++ b/examples/event_handler_rest/sam/template.yaml @@ -0,0 +1,56 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Hello world event handler API Gateway + +Globals: + Api: + TracingEnabled: true + Cors: # see CORS section + AllowOrigin: "'https://example.com'" + AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" + MaxAge: "'300'" + BinaryMediaTypes: # see Binary responses section + - "*~1*" # converts to */* for any binary type + Function: + Timeout: 5 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: example + +Resources: + 
ApiFunction: + Type: AWS::Serverless::Function + Properties: + Handler: getting_started_rest_api_resolver.lambda_handler + CodeUri: ../src + Description: API handler function + Events: + AnyApiEvent: + Type: Api + Properties: + # NOTE: this is a catch-all rule to simplify the documentation. + # explicit routes and methods are recommended for prod instead (see below) + Path: /{proxy+} # Send requests on any path to the lambda function + Method: ANY # Send requests using any http method to the lambda function + + + # GetAllTodos: + # Type: Api + # Properties: + # Path: /todos + # Method: GET + # GetTodoById: + # Type: Api + # Properties: + # Path: /todos/{todo_id} + # Method: GET + # CreateTodo: + # Type: Api + # Properties: + # Path: /todos + # Method: POST diff --git a/examples/event_handler_rest/src/accessing_request_details.py b/examples/event_handler_rest/src/accessing_request_details.py new file mode 100644 index 00000000000..9929b601db0 --- /dev/null +++ b/examples/event_handler_rest/src/accessing_request_details.py @@ -0,0 +1,40 @@ +from typing import Optional + +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todo_id: str = app.current_event.get_query_string_value(name="id", default_value="") + # alternatively + _: Optional[str] = app.current_event.query_string_parameters.get("id") + + # Payload + _: Optional[str] = app.current_event.body # raw str | None + + endpoint = "https://jsonplaceholder.typicode.com/todos" + if todo_id: + endpoint = f"{endpoint}/{todo_id}" + + todos: Response = requests.get(endpoint) + todos.raise_for_status() + + return {"todos": todos.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/accessing_request_details_headers.py b/examples/event_handler_rest/src/accessing_request_details_headers.py new file mode 100644 index 00000000000..f6bfb88c869 --- /dev/null +++ b/examples/event_handler_rest/src/accessing_request_details_headers.py @@ -0,0 +1,30 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + endpoint = "https://jsonplaceholder.typicode.com/todos" + + api_key: str = app.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + return {"todos": todos.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: 
LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/assert_http_response.py b/examples/event_handler_rest/src/assert_http_response.py new file mode 100644 index 00000000000..95d56599288 --- /dev/null +++ b/examples/event_handler_rest/src/assert_http_response.py @@ -0,0 +1,28 @@ +from dataclasses import dataclass + +import assert_http_response_module +import pytest + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test" + aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc" + + return LambdaContext() + + +def test_lambda_handler(lambda_context): + minimal_event = { + "path": "/todos", + "httpMethod": "GET", + "requestContext": {"requestId": "227b78aa-779d-47d4-a48e-ce62120393b8"}, # correlation ID + } + + ret = assert_http_response_module.lambda_handler(minimal_event, lambda_context) + assert ret["statusCode"] == 200 + assert ret["body"] != "" diff --git a/examples/event_handler_rest/src/assert_http_response_module.py b/examples/event_handler_rest/src/assert_http_response_module.py new file mode 100644 index 00000000000..ea5d839fb72 --- /dev/null +++ b/examples/event_handler_rest/src/assert_http_response_module.py @@ -0,0 +1,27 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/binary_responses.json b/examples/event_handler_rest/src/binary_responses.json new file mode 100644 index 00000000000..fcdf86dfebe --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses.json @@ -0,0 +1,8 @@ +{ + "headers": { + "Accept": "image/svg+xml" + }, + "resource": "/logo", + "path": "/logo", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/binary_responses.py b/examples/event_handler_rest/src/binary_responses.py new file mode 100644 index 00000000000..00c027937b8 --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses.py @@ -0,0 +1,27 @@ +import os +from pathlib import Path + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() + + +app = APIGatewayRestResolver() +logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() + + +@app.get("/logo") +@tracer.capture_method +def get_logo(): + return Response(status_code=200, content_type="image/svg+xml", body=logo_file) + + +# You can continue to use other utilities 
just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/binary_responses_logo.svg b/examples/event_handler_rest/src/binary_responses_logo.svg new file mode 100644 index 00000000000..fccb29e01ed --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses_logo.svg @@ -0,0 +1,14 @@ + + + AWS Lambda + + + + + + + + + + + diff --git a/examples/event_handler_rest/src/binary_responses_output.json b/examples/event_handler_rest/src/binary_responses_output.json new file mode 100644 index 00000000000..0938dee6811 --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses_output.json @@ -0,0 +1,8 @@ +{ + "body": "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjU2cHgiIGhlaWdodD0iMjU2cHgiIHZpZXdCb3g9IjAgMCAyNTYgMjU2IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIj4KICAgIDx0aXRsZT5BV1MgTGFtYmRhPC90aXRsZT4KICAgIDxkZWZzPgogICAgICAgIDxsaW5lYXJHcmFkaWVudCB4MT0iMCUiIHkxPSIxMDAlIiB4Mj0iMTAwJSIgeTI9IjAlIiBpZD0ibGluZWFyR3JhZGllbnQtMSI+CiAgICAgICAgICAgIDxzdG9wIHN0b3AtY29sb3I9IiNDODUxMUIiIG9mZnNldD0iMCUiPjwvc3RvcD4KICAgICAgICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI0ZGOTkwMCIgb2Zmc2V0PSIxMDAlIj48L3N0b3A+CiAgICAgICAgPC9saW5lYXJHcmFkaWVudD4KICAgIDwvZGVmcz4KICAgIDxnPgogICAgICAgIDxyZWN0IGZpbGw9InVybCgjbGluZWFyR3JhZGllbnQtMSkiIHg9IjAiIHk9IjAiIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2Ij48L3JlY3Q+CiAgICAgICAgPHBhdGggZD0iTTg5LjYyNDExMjYsMjExLjIgTDQ5Ljg5MDMyNzcsMjExLjIgTDkzLjgzNTQ4MzIsMTE5LjM0NzIgTDExMy43NDcyOCwxNjAuMzM5MiBMODkuNjI0MTEyNiwyMTEuMiBaIE05Ni43MDI5MzU3LDExMC41Njk2IEM5Ni4xNjQwODU4LDEwOS40NjU2IDk1LjA0MTQ4MTMsMTA4Ljc2NDggOTMuODE2MjM4NCwxMDguNzY0OCBMOTMuODA2NjE2MywxMDguNzY0OCBDOTIuNTcxNzUxNCwxMDguNzY4IDkxLjQ0OTE0NjYsMTA5LjQ3NTIgOTAuOTE5OTE4NywxMTAuNTg1NiBMNDEuOTEzNDIwOCwyMTMuMDIwOCBDNDEuNDM4NzE5NywyMTQuMDEyOCA0MS41MDYwNzU4LDIxNS4xNzc2IDQyLjA5NjI0NTEsMjE2LjEwODggQzQyLjY3OTk5OTQsMjE3LjAzNjggNDMuNzA2MzgwNSwyMTcuNiA0NC44MDY1MzMxLDIxNy42IEw5MS42NTQ0MjMsMjE3LjYgQzkyLjg5NTcwMjcsMjE3LjYgOTQuMDIxNTE0OSwyMTYuODg2NCA5NC41NTM5NTAxLDIxNS43Njk2IEwxMjAuMjAzODU5LDE2MS42ODk2IEMxMjAuNjE3NjE5LDE2MC44MTI4IDEyMC42MTQ0MTIsMTU5Ljc5ODQgMTIwLjE4NzgyMiwxNTguOTI4IEw5Ni43MDI5MzU3LDExMC41Njk2IFogTTIwNy45ODUxMTcsMjExLjIgTDE2OC41MDc5MjgsMjExLjIgTDEwNS4xNzM3ODksNzguNjI0IEMxMDQuNjQ0NTYxLDc3LjUxMDQgMTAzLjUxNTU0MSw3Ni44IDEwMi4yNzc0NjksNzYuOCBMNzYuNDQ3OTQzLDc2LjggTDc2LjQ3NjgwOTksNDQuOCBMMTI3LjEwMzA2Niw0NC44IEwxOTAuMTQ1MzI4LDE3Ny4zNzI4IEMxOTAuNjc0NTU2LDE3OC40ODY0IDE5MS44MDM1NzUsMTc5LjIgMTkzLjA0MTY0NywxNzkuMiBMMjA3Ljk4NTExNywxNzkuMiBMMjA3Ljk4NTExNywyMTEuMiBaIE0yMTEuMTkyNTU4LDE3Mi44IEwxOTUuMDcxOTU4LDE3Mi44IEwxMzIuMDI5Njk2LDQwLjIyNzIgQzEzMS41MDA0NjgsMzkuMTEzNiAxMzAuMzcxNDQ5LDM4LjQgMTI5LjEzMDE2OSwzOC40IEw3My4yNzI1NzYsMzguNCBDNzEuNTA1Mjc1OCwzOC40IDcwLjA2ODM0MjEsMzkuODMwNCA3MC4wNjUxMzQ0LDQxLjU5NjggTDcwLjAyOTg1MjgsNzkuOTk2OCBDNzAuMDI5ODUyOCw4MC44NDggNzAuMzYzNDI2Niw4MS42NjA4IDcwLjk2OTYzMyw4Mi4yNjI0IEM3MS41Njk0MjQ2LDgyLjg2NCA3Mi4zODQxMTQ2LDgzLjIgNzMuMjM3Mjk0MSw4My4yIEwxMDAuMjUzNTczLDgzLjIgTDE2My41OTA5MiwyMTUuNzc2IEMxNjQuMTIzMzU1LDIxNi44ODk2IDE2NS4yNDU5NiwyMTcuNiAxNjYuNDg0MDMyLDIxNy42IEwyMTEuMTkyNTU4LDIxNy42IEMyMTIuOTY2Mjc0LDIxNy42IDIxNC40LDIxNi4xNjY0IDIxNC40LDIxNC40IEwyMTQuNCwxNzYgQzIxNC40LDE3NC4yMzM2IDIxMi45NjYyNzQsMTcyLjggMjExLjE5MjU1OCwxNzIuOCBMMjExLjE5MjU1OCwxNzIuOCBaIiBmaWxsPSIjRkZGRkZGIj48L3BhdGg+C
iAgICA8L2c+Cjwvc3ZnPg==", + "headers": { + "Content-Type": "image/svg+xml" + }, + "isBase64Encoded": true, + "statusCode": 200 +} diff --git a/examples/event_handler_rest/src/compressing_responses.json b/examples/event_handler_rest/src/compressing_responses.json new file mode 100644 index 00000000000..f706df20d58 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses.json @@ -0,0 +1,8 @@ +{ + "headers": { + "Accept-Encoding": "gzip" + }, + "resource": "/todos", + "path": "/todos", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/compressing_responses.py b/examples/event_handler_rest/src/compressing_responses.py new file mode 100644 index 00000000000..1af4b9a58b2 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos", compress=True) +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/compressing_responses_output.json b/examples/event_handler_rest/src/compressing_responses_output.json new file mode 100644 index 00000000000..0836b3aa726 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses_output.json @@ -0,0 +1,9 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "Content-Encoding": "gzip" + }, + "body": "H4sIAAAAAAACE42STU4DMQyFrxJl3QXln96AMyAW7sSDLCVxiJ0Kqerd8TCCUOgii1EmP/783pOPXjmw+N3L0TfB+hz8brvxtC5KGtHvfMCIkzZx0HT5MPmNnziViIr2dIYoeNr8Q1x3xHsjcVadIbkZJoq2RXU8zzQROLseQ9505NzeCNQdMJNBE+UmY4zbzjAJhWtlZ57sB84BWtul+rteH2HPlVgWARwjqXkxpklK5gmEHAQqJBMtFsGVygcKmNVRjG0wxvuzGF2L0dpVUOKMC3bfJNjJgWMrCuZk7cUp02AiD72D6WKHHwUDKbiJs6AZ0VZXKOUx4uNvzdxT+E4mLcMA+6G8nzrLQkaxkNEVrFKW2VGbJCoCY7q2V3+tiv5kGThyxfTecDWbgGz/NfYXhL6ePgF9PnFdPgMAAA==", + "isBase64Encoded": true +} diff --git a/examples/event_handler_rest/src/custom_api_mapping.json b/examples/event_handler_rest/src/custom_api_mapping.json new file mode 100644 index 00000000000..eb1d68afbf9 --- /dev/null +++ b/examples/event_handler_rest/src/custom_api_mapping.json @@ -0,0 +1,5 @@ +{ + "resource": "/subscriptions/{subscription}", + "path": "/payment/subscriptions/123", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/custom_api_mapping.py b/examples/event_handler_rest/src/custom_api_mapping.py new file mode 100644 index 00000000000..0b180d54f01 --- /dev/null +++ b/examples/event_handler_rest/src/custom_api_mapping.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = 
Logger()
+app = APIGatewayRestResolver(strip_prefixes=["/payment"])
+
+
+@app.get("/subscriptions/<subscription>")
+@tracer.capture_method
+def get_subscription(subscription):
+    return {"subscription_id": subscription}
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/custom_serializer.py b/examples/event_handler_rest/src/custom_serializer.py
new file mode 100644
index 00000000000..cfb8cefd2d9
--- /dev/null
+++ b/examples/event_handler_rest/src/custom_serializer.py
@@ -0,0 +1,58 @@
+import json
+from dataclasses import asdict, dataclass, is_dataclass
+from json import JSONEncoder
+
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@dataclass
+class Todo:
+    userId: str
+    id: str  # noqa: A003 VNE003 "id" field is reserved
+    title: str
+    completed: bool
+
+
+class DataclassCustomEncoder(JSONEncoder):
+    """A custom JSON encoder to serialize dataclass obj"""
+
+    def default(self, obj):
+        # Only called for values that aren't JSON serializable
+        # where `obj` will be an instance of Todo in this example
+        return asdict(obj) if is_dataclass(obj) else super().default(obj)
+
+
+def custom_serializer(obj) -> str:
+    """Your custom serializer function APIGatewayRestResolver will use"""
+    return json.dumps(obj, separators=(",", ":"), cls=DataclassCustomEncoder)
+
+
+app = APIGatewayRestResolver(serializer=custom_serializer)
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+    ret: Response = requests.get("https://jsonplaceholder.typicode.com/todos")
+    ret.raise_for_status()
+    todos = [Todo(**todo) for todo in ret.json()]
+
+    # for brevity, we'll limit to the first 10 only
+    return {"todos": todos[:10]}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/debug_mode.py b/examples/event_handler_rest/src/debug_mode.py
new file mode 100644
index 00000000000..47ffb8905eb
--- /dev/null
+++ b/examples/event_handler_rest/src/debug_mode.py
@@ -0,0 +1,28 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver(debug=True)
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+    todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos")
+    todos.raise_for_status()
+
+    # for brevity, we'll limit to the first 10 only
+    return {"todos": todos.json()[:10]}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/dynamic_routes.json b/examples/event_handler_rest/src/dynamic_routes.json
new file mode 100644
index 00000000000..23e8261d283
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes.json
@@ -0,0 +1,5 @@
+{
+  "resource": "/todos/{id}",
+  "path": "/todos/1",
+  "httpMethod": "GET"
+}
diff --git a/examples/event_handler_rest/src/dynamic_routes.py b/examples/event_handler_rest/src/dynamic_routes.py
new file mode 100644
index 00000000000..2ee2dc21044
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes.py
@@ -0,0 +1,27 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.get("/todos/<todo_id>")
+@tracer.capture_method
+def get_todo_by_id(todo_id: str):  # value comes as str
+    todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}")
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/dynamic_routes_catch_all.json b/examples/event_handler_rest/src/dynamic_routes_catch_all.json
new file mode 100644
index 00000000000..c9395f23027
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes_catch_all.json
@@ -0,0 +1,5 @@
+{
+  "resource": "/{proxy+}",
+  "path": "/any/route/should/work",
+  "httpMethod": "GET"
+}
diff --git a/examples/event_handler_rest/src/dynamic_routes_catch_all.py b/examples/event_handler_rest/src/dynamic_routes_catch_all.py
new file mode 100644
index 00000000000..f615f2a8dee
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes_catch_all.py
@@ -0,0 +1,21 @@
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.get(".+")
+@tracer.capture_method
+def catch_any_route_get_method():
+    return {"path_received": app.current_event.path}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/exception_handling.py b/examples/event_handler_rest/src/exception_handling.py
new file mode 100644
index 00000000000..fdac8589299
--- /dev/null
+++ b/examples/event_handler_rest/src/exception_handling.py
@@ -0,0 +1,43 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = 
Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.exception_handler(ValueError) +def handle_invalid_limit_qs(ex: ValueError): # receives exception raised + metadata = {"path": app.current_event.path, "query_strings": app.current_event.query_string_parameters} + logger.error(f"Malformed request: {ex}", extra=metadata) + + return Response( + status_code=400, + content_type=content_types.TEXT_PLAIN, + body="Invalid request parameters.", + ) + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + # educational purpose only: we should receive a `ValueError` + # if a query string value for `limit` cannot be coerced to int + max_results: int = int(app.current_event.get_query_string_value(name="limit", default_value=0)) + + todos: requests.Response = requests.get(f"https://jsonplaceholder.typicode.com/todos?limit={max_results}") + todos.raise_for_status() + + return {"todos": todos.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py new file mode 100644 index 00000000000..3e477160307 --- /dev/null +++ b/examples/event_handler_rest/src/fine_grained_responses.py @@ -0,0 +1,37 @@ +import json +from http import HTTPStatus +from uuid import uuid4 + +import requests + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method
def get_todos(): + todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + custom_headers = {"X-Transaction-Id": f"{uuid4()}"} + + return Response( + status_code=HTTPStatus.OK.value, # 200 + content_type=content_types.APPLICATION_JSON, + body=json.dumps({"todos": todos.json()[:10]}), # serialize body to a JSON string + headers=custom_headers, + ) + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/fine_grained_responses_output.json b/examples/event_handler_rest/src/fine_grained_responses_output.json new file mode 100644 index 00000000000..c3d58098e80 --- /dev/null +++ b/examples/event_handler_rest/src/fine_grained_responses_output.json @@ -0,0 +1,9 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "X-Transaction-Id": "3490eea9-791b-47a0-91a4-326317db61a9" + }, + "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione 
quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", + "isBase64Encoded": false +} diff --git a/examples/event_handler_rest/src/getting_started_alb_api_resolver.py b/examples/event_handler_rest/src/getting_started_alb_api_resolver.py new file mode 100644 index 00000000000..612823625ec --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_alb_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import ALBResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = ALBResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_http_api_resolver.py b/examples/event_handler_rest/src/getting_started_http_api_resolver.py new file mode 100644 index 00000000000..e976ef4169f --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_http_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayHttpResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayHttpResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver.json new file mode 100644 index 00000000000..92d3e40f139 --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver.json @@ -0,0 +1,58 @@ +{ + "body": "", + "resource": "/todos", + "path": "/todos", + "httpMethod": "GET", + "isBase64Encoded": false, + "queryStringParameters": {}, + "multiValueQueryStringParameters": {}, + "pathParameters": {}, + "stageVariables": {}, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + 
"Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + "CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https" + }, + "multiValueHeaders": {}, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "Prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "25/Jul/2020:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "identity": { + "cognitoIdentityPoolId": null, + "accountId": null, + "cognitoIdentityId": null, + "caller": null, + "accessKey": null, + "sourceIp": "127.0.0.1", + "cognitoAuthenticationType": null, + "cognitoAuthenticationProvider": null, + "userArn": null, + "userAgent": "Custom User Agent String", + "user": null + }, + "path": "/Prod/todos", + "resourcePath": "/todos", + "httpMethod": "GET", + "apiId": "1234567890", + "protocol": "HTTP/1.1" + } +} diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver.py b/examples/event_handler_rest/src/getting_started_rest_api_resolver.py new file mode 100644 index 00000000000..3b30b5810f2 --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json new file mode 100644 index 00000000000..2ef3714531f --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json @@ -0,0 +1,8 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json" + }, + "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci 
quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false } diff --git a/examples/event_handler_rest/src/http_methods.json b/examples/event_handler_rest/src/http_methods.json new file mode 100644 index 00000000000..e0f775d72df --- /dev/null +++ b/examples/event_handler_rest/src/http_methods.json @@ -0,0 +1,6 @@ +{ + "resource": "/todos", + "path": "/todos", + "httpMethod": "POST", + "body": "{\"title\": \"foo\", \"userId\": 1, \"completed\": false}" +} diff --git a/examples/event_handler_rest/src/http_methods.py b/examples/event_handler_rest/src/http_methods.py new file mode 100644 index 00000000000..47eb1499a38 --- /dev/null +++ b/examples/event_handler_rest/src/http_methods.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.post("/todos") +@tracer.capture_method +def create_todo(): + todo_data: dict = app.current_event.json_body # deserialize json str to dict + todo: Response = requests.post("https://jsonplaceholder.typicode.com/todos", data=todo_data) + todo.raise_for_status() + + return {"todo": todo.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/http_methods_multiple.py b/examples/event_handler_rest/src/http_methods_multiple.py new file mode 100644 index 00000000000..a482c96d80f --- /dev/null +++ b/examples/event_handler_rest/src/http_methods_multiple.py @@ -0,0 +1,29 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +# PUT and POST HTTP requests to the path /todos will route to this function +@app.route("/todos", method=["PUT", "POST"]) +@tracer.capture_method +def create_todo(): + todo_data: dict = app.current_event.json_body # deserialize json str to dict + todo: Response = requests.post("https://jsonplaceholder.typicode.com/todos", data=todo_data) + todo.raise_for_status() + + return {"todo": todo.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/not_found_routes.py 
b/examples/event_handler_rest/src/not_found_routes.py new file mode 100644 index 00000000000..889880292c0 --- /dev/null +++ b/examples/event_handler_rest/src/not_found_routes.py @@ -0,0 +1,35 @@ +import requests + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types +from aws_lambda_powertools.event_handler.exceptions import NotFoundError +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.not_found +@tracer.capture_method +def handle_not_found_errors(exc: NotFoundError) -> Response: + logger.info(f"Not found route: {app.current_event.path}") + return Response(status_code=418, content_type=content_types.TEXT_PLAIN, body="I'm a teapot!") + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/raising_http_errors.py b/examples/event_handler_rest/src/raising_http_errors.py new file mode 100644 index 00000000000..97e7cc5048f --- /dev/null +++ b/examples/event_handler_rest/src/raising_http_errors.py @@ -0,0 +1,59 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.exceptions import ( + BadRequestError, + InternalServerError, + NotFoundError, + ServiceError, + UnauthorizedError, +) +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get(rule="/bad-request-error") +def bad_request_error(): + raise BadRequestError("Missing required parameter") # HTTP 400 + + +@app.get(rule="/unauthorized-error") +def unauthorized_error(): + raise UnauthorizedError("Unauthorized") # HTTP 401 + + +@app.get(rule="/not-found-error") +def not_found_error(): + raise NotFoundError # HTTP 404 + + +@app.get(rule="/internal-server-error") +def internal_server_error(): + raise InternalServerError("Internal server error") # HTTP 500 + + +@app.get(rule="/service-error", cors=True) +def service_error(): + raise ServiceError(502, "Something went wrong!") + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/setting_cors.py b/examples/event_handler_rest/src/setting_cors.py new file mode 100644 index 00000000000..101e013e552 --- /dev/null +++ 
b/examples/event_handler_rest/src/setting_cors.py @@ -0,0 +1,44 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, CORSConfig +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) +app = APIGatewayRestResolver(cors=cors_config) + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +@app.get("/todos/<todo_id>") +@tracer.capture_method +def get_todo_by_id(todo_id: str): # value comes as str + todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}") + todos.raise_for_status() + + return {"todos": todos.json()} + + +@app.get("/healthcheck", cors=False) # optionally removes CORS for a given route +@tracer.capture_method +def am_i_alive(): + return {"am_i_alive": "yes"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/setting_cors_output.json b/examples/event_handler_rest/src/setting_cors_output.json new file mode 100644 index 00000000000..ca86e892d38 --- /dev/null +++ b/examples/event_handler_rest/src/setting_cors_output.json @@ -0,0 +1,10 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "Access-Control-Allow-Origin": "https://example.com", + "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key" + }, + "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", + "isBase64Encoded": false +} diff --git a/examples/event_handler_rest/src/split_route.py b/examples/event_handler_rest/src/split_route.py new file mode 100644 index 00000000000..6c0933ea08e --- /dev/null +++ b/examples/event_handler_rest/src/split_route.py @@ -0,0 +1,18 @@ +import split_route_module + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = 
Logger() +app = APIGatewayRestResolver() +app.include_router(split_route_module.router) + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/split_route_module.py b/examples/event_handler_rest/src/split_route_module.py new file mode 100644 index 00000000000..eeb696ede56 --- /dev/null +++ b/examples/event_handler_rest/src/split_route_module.py @@ -0,0 +1,33 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Tracer +from aws_lambda_powertools.event_handler.api_gateway import Router + +tracer = Tracer() +router = Router() + +endpoint = "https://jsonplaceholder.typicode.com/todos" + + +@router.get("/todos") +@tracer.capture_method +def get_todos(): + api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + + todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +@router.get("/todos/<todo_id>") +@tracer.capture_method +def get_todo_by_id(todo_id: str): # value comes as str + api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + + todos: Response = requests.get(f"{endpoint}/{todo_id}", headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + return {"todos": todos.json()} diff --git a/examples/event_handler_rest/src/split_route_prefix.py b/examples/event_handler_rest/src/split_route_prefix.py new file mode 100644 index 00000000000..01129c80148 --- /dev/null +++ b/examples/event_handler_rest/src/split_route_prefix.py @@ -0,0 +1,19 @@ +import split_route_prefix_module + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() +# prefix '/todos' to any route in `split_route_prefix_module.router` +app.include_router(split_route_prefix_module.router, prefix="/todos") + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/split_route_prefix_module.py b/examples/event_handler_rest/src/split_route_prefix_module.py new file mode 100644 index 00000000000..b4035282776 --- /dev/null +++ b/examples/event_handler_rest/src/split_route_prefix_module.py @@ -0,0 +1,36 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Tracer +from aws_lambda_powertools.event_handler.api_gateway import Router + +tracer = Tracer() +router = Router() + +endpoint = "https://jsonplaceholder.typicode.com/todos" + + +@router.get("/") +@tracer.capture_method +def get_todos(): + api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + + todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + 
return {"todos": todos.json()[:10]} + + +@router.get("/") +@tracer.capture_method +def get_todo_by_id(todo_id: str): # value come as str + api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + + todos: Response = requests.get(f"{endpoint}/{todo_id}", headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + return {"todos": todos.json()} + + +# many more routes diff --git a/examples/logger/src/append_and_remove_keys.json b/examples/logger/src/append_and_remove_keys_output.json similarity index 100% rename from examples/logger/src/append_and_remove_keys.json rename to examples/logger/src/append_and_remove_keys_output.json diff --git a/examples/logger/src/set_correlation_id_jmespath_event.json b/examples/logger/src/set_correlation_id_jmespath.json similarity index 100% rename from examples/logger/src/set_correlation_id_jmespath_event.json rename to examples/logger/src/set_correlation_id_jmespath.json diff --git a/examples/logger/src/set_correlation_id_method_event.json b/examples/logger/src/set_correlation_id_method.json similarity index 100% rename from examples/logger/src/set_correlation_id_method_event.json rename to examples/logger/src/set_correlation_id_method.json diff --git a/examples/tracer/src/tracer_reuse.py b/examples/tracer/src/tracer_reuse.py index 5f12f82b714..bdfe7bc9d91 100644 --- a/examples/tracer/src/tracer_reuse.py +++ b/examples/tracer/src/tracer_reuse.py @@ -1,4 +1,4 @@ -from tracer_reuse_payment import collect_payment +from tracer_reuse_module import collect_payment from aws_lambda_powertools import Tracer from aws_lambda_powertools.utilities.typing import LambdaContext diff --git a/examples/tracer/src/tracer_reuse_payment.py b/examples/tracer/src/tracer_reuse_module.py similarity index 100% rename from examples/tracer/src/tracer_reuse_payment.py rename to examples/tracer/src/tracer_reuse_module.py diff --git a/layer/.gitignore b/layer/.gitignore new file mode 100644 index 00000000000..37833f8beb2 --- /dev/null +++ b/layer/.gitignore @@ -0,0 +1,10 @@ +*.swp +package-lock.json +__pycache__ +.pytest_cache +.venv +*.egg-info + +# CDK asset staging directory +.cdk.staging +cdk.out diff --git a/layer/README.md b/layer/README.md new file mode 100644 index 00000000000..99da0083ffc --- /dev/null +++ b/layer/README.md @@ -0,0 +1,27 @@ + +# CDK Powertools layer + +This is a CDK project to build and deploy AWS Lambda Powertools [Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-concepts.html#gettingstarted-concepts-layer) to multiple commercial regions. + +## Build the layer + +To build the layer construct you need to provide the Powertools version that is [available in PyPi](https://pypi.org/project/aws-lambda-powertools/). +You can pass it as a context variable when running `synth` or `deploy`, + +```shell +cdk synth --context version=1.25.1 +``` + +## Canary stack + +We use a canary stack to verify that the deployment is successful and we can use the layer by adding it to a newly created Lambda function. +The canary is deployed after the layer construct. Because the layer ARN is created during the deploy we need to pass this information async via SSM parameter. +To achieve that we use SSM parameter store to pass the layer ARN to the canary. +The layer stack writes the layer ARN after the deployment as SSM parameter and the canary stacks reads this information and adds the layer to the function. 
+ +## Version tracking + +AWS Lambda versions Lambda layers by incrementing a number at the end of the ARN. +This makes it challenging to know which Powertools version a layer contains. +To better track the ARNs and their corresponding versions, we need to keep a record of which Powertools version was deployed to which layer. +To achieve that we created two components. First, we created a version tracking app which receives events via EventBridge. Second, after a successful canary deployment we send the layer ARN, Powertools version, and region to this EventBridge event bus. diff --git a/layer/app.py b/layer/app.py new file mode 100644 index 00000000000..78e99b17654 --- /dev/null +++ b/layer/app.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 + +import aws_cdk as cdk + +from layer.canary_stack import CanaryStack +from layer.layer_stack import LayerStack + +app = cdk.App() + +POWERTOOLS_VERSION: str = app.node.try_get_context("version") +SSM_PARAM_LAYER_ARN: str = "/layers/powertools-layer-arn" + +if not POWERTOOLS_VERSION: + raise ValueError( + "Please set the version for Powertools by passing the '--context=version:<version>' parameter to the CDK " + "synth step." + ) + +LayerStack(app, "LayerStack", powertools_version=POWERTOOLS_VERSION, ssm_parameter_layer_arn=SSM_PARAM_LAYER_ARN) + +CanaryStack(app, "CanaryStack", powertools_version=POWERTOOLS_VERSION, ssm_parameter_layer_arn=SSM_PARAM_LAYER_ARN) + +app.synth() diff --git a/layer/cdk.json b/layer/cdk.json new file mode 100644 index 00000000000..c120c5f4765 --- /dev/null +++ b/layer/cdk.json @@ -0,0 +1,35 @@ +{ + "app": "python3 app.py", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "requirements*.txt", + "source.bat", + "**/__init__.py", + "python/__pycache__", + "tests" + ] + }, + "context": { + "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, + "@aws-cdk/core:stackRelativeExports": true, + "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, + "@aws-cdk/aws-lambda:recognizeVersionProps": true, + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ] + } +} diff --git a/layer/layer/__init__.py b/layer/layer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/layer/layer/canary/app.py b/layer/layer/canary/app.py new file mode 100644 index 00000000000..31db94dd92b --- /dev/null +++ b/layer/layer/canary/app.py @@ -0,0 +1,99 @@ +import datetime +import json +import os +from importlib.metadata import version + +import boto3 + +from aws_lambda_powertools import Logger, Metrics, Tracer + +logger = Logger(service="version-track") +tracer = Tracer() +metrics = Metrics(namespace="powertools-layer-canary", service="PowertoolsLayerCanary") + +layer_arn = os.getenv("POWERTOOLS_LAYER_ARN") +powertools_version = os.getenv("POWERTOOLS_VERSION") +stage = os.getenv("LAYER_PIPELINE_STAGE") +event_bus_arn = os.getenv("VERSION_TRACKING_EVENT_BUS_ARN") + + +def handler(event): + logger.info("Running checks") + check_envs() + verify_powertools_version() + send_notification() + return True + + 
+@logger.inject_lambda_context(log_event=True) +def on_event(event, context): + request_type = event["RequestType"] + # we handle only create events, because we recreate the canary on each run + if request_type == "Create": + return on_create(event) + + return "Nothing to be processed" + + +def on_create(event): + props = event["ResourceProperties"] + logger.info(f"create new resource with properties {props}") + handler(event) + + +def check_envs(): + logger.info('Checking required envs ["POWERTOOLS_LAYER_ARN", "POWERTOOLS_VERSION", "LAYER_PIPELINE_STAGE", "VERSION_TRACKING_EVENT_BUS_ARN"]') + if not layer_arn: + raise ValueError("POWERTOOLS_LAYER_ARN is not set. Aborting...") + if not powertools_version: + raise ValueError("POWERTOOLS_VERSION is not set. Aborting...") + if not stage: + raise ValueError("LAYER_PIPELINE_STAGE is not set. Aborting...") + if not event_bus_arn: + raise ValueError("VERSION_TRACKING_EVENT_BUS_ARN is not set. Aborting...") + logger.info("All envs configured, continue...") + + +def verify_powertools_version() -> None: + """ + Fetches the version that we import from the Powertools layer and compares + it with the expected version set in the environment variable, which we pass during deployment. + :raises ValueError: if the expected version is not the same as the version we get from the layer + """ + logger.info("Checking Powertools version in library...") + current_version = version("aws_lambda_powertools") + if powertools_version != current_version: + raise ValueError( + f'Expected powertools version is "{powertools_version}", but layer contains version "{current_version}"' + ) + logger.info(f"Current Powertools version is: {current_version}") + + +def send_notification(): + """ + Sends a deployment event to the version tracking EventBridge event bus. + """ + event = { + "Time": datetime.datetime.now(), + "Source": "powertools.layer.canary", + "EventBusName": event_bus_arn, + "DetailType": "deployment", + "Detail": json.dumps( + { + "id": "powertools-python", + "stage": stage, + "region": os.environ["AWS_REGION"], + "version": powertools_version, + "layerArn": layer_arn, + } + ), + } + + logger.info(f"sending notification event: {event}") + + client = boto3.client("events", region_name="eu-central-1") + resp = client.put_events(Entries=[event]) + logger.info(resp) + if resp["FailedEntryCount"] != 0: + logger.error(resp) + raise ValueError("Failed to send deployment notification to version tracking") diff --git a/layer/layer/canary_stack.py b/layer/layer/canary_stack.py new file mode 100644 index 00000000000..15bc80214d3 --- /dev/null +++ b/layer/layer/canary_stack.py @@ -0,0 +1,75 @@ +import uuid + +from aws_cdk import CfnParameter, CustomResource, Duration, Stack +from aws_cdk.aws_iam import Effect, ManagedPolicy, PolicyStatement, Role, ServicePrincipal +from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime +from aws_cdk.aws_logs import RetentionDays +from aws_cdk.aws_ssm import StringParameter +from aws_cdk.custom_resources import Provider +from constructs import Construct + + +class CanaryStack(Stack): + def __init__( + self, + scope: Construct, + construct_id: str, + powertools_version: str, + ssm_parameter_layer_arn: str, + **kwargs, + ) -> None: + super().__init__(scope, construct_id, **kwargs) + + VERSION_TRACKING_EVENT_BUS_ARN: str = ( + "arn:aws:events:eu-central-1:027876851704:event-bus/VersionTrackingEventBus" + ) + + layer_arn = StringParameter.from_string_parameter_attributes( + self, "LayerVersionArnParam", parameter_name=ssm_parameter_layer_arn + ).string_value + + layer = LayerVersion.from_layer_version_arn(self, 
"PowertoolsLayer", layer_version_arn=layer_arn) + deploy_stage = CfnParameter(self, "DeployStage", description="Deployment stage for canary").value_as_string + + execution_role = Role(self, "LambdaExecutionRole", assumed_by=ServicePrincipal("lambda.amazonaws.com")) + + execution_role.add_managed_policy( + ManagedPolicy.from_aws_managed_policy_name("service-role/AWSLambdaBasicExecutionRole") + ) + + execution_role.add_to_policy( + PolicyStatement(effect=Effect.ALLOW, actions=["lambda:GetFunction"], resources=["*"]) + ) + + canary_lambda = Function( + self, + "CanaryLambdaFunction", + function_name="CanaryLambdaFunction", + code=Code.from_asset("layer/canary"), + handler="app.on_event", + layers=[layer], + memory_size=512, + timeout=Duration.seconds(10), + runtime=Runtime.PYTHON_3_9, + log_retention=RetentionDays.ONE_MONTH, + role=execution_role, + environment={ + "POWERTOOLS_VERSION": powertools_version, + "POWERTOOLS_LAYER_ARN": layer_arn, + "VERSION_TRACKING_EVENT_BUS_ARN": VERSION_TRACKING_EVENT_BUS_ARN, + "LAYER_PIPELINE_STAGE": deploy_stage, + }, + ) + + canary_lambda.add_to_role_policy( + PolicyStatement( + effect=Effect.ALLOW, actions=["events:PutEvents"], resources=[VERSION_TRACKING_EVENT_BUS_ARN] + ) + ) + + # custom resource provider configuration + provider = Provider( + self, "CanaryCustomResource", on_event_handler=canary_lambda, log_retention=RetentionDays.ONE_MONTH + ) + # force to recreate resource on each deployment with randomized name + CustomResource(self, f"CanaryTrigger-{str(uuid.uuid4())[0:7]}", service_token=provider.service_token) diff --git a/layer/layer/layer_stack.py b/layer/layer/layer_stack.py new file mode 100644 index 00000000000..8b32de9c206 --- /dev/null +++ b/layer/layer/layer_stack.py @@ -0,0 +1,19 @@ +from aws_cdk import Stack +from aws_cdk.aws_ssm import StringParameter +from cdk_lambda_powertools_python_layer import LambdaPowertoolsLayer +from constructs import Construct + + +class LayerStack(Stack): + def __init__( + self, scope: Construct, construct_id: str, powertools_version: str, ssm_paramter_layer_arn: str, **kwargs + ) -> None: + super().__init__(scope, construct_id, **kwargs) + + layer = LambdaPowertoolsLayer( + self, "Layer", layer_version_name="AWSLambdaPowertoolsPython", version=powertools_version + ) + + layer.add_permission("PublicLayerAccess", account_id="*") + + StringParameter(self, "VersionArn", parameter_name=ssm_paramter_layer_arn, string_value=layer.layer_version_arn) diff --git a/layer/requirements-dev.txt b/layer/requirements-dev.txt new file mode 100644 index 00000000000..f3ec7d732b5 --- /dev/null +++ b/layer/requirements-dev.txt @@ -0,0 +1,2 @@ +pytest==6.2.5 +boto3==1.24.22 diff --git a/layer/requirements.txt b/layer/requirements.txt new file mode 100644 index 00000000000..ca8c53b1eaf --- /dev/null +++ b/layer/requirements.txt @@ -0,0 +1,76 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes requirements.txt +# +attrs==21.4.0 \ + --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ + --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd + # via + # -r requirements.txt + # cattrs + # jsii +aws-cdk-lib==2.31.1 \ + --hash=sha256:a07f6a247be110e874af374fa683d6c7eba86dfc9781cb555428b534c75bd4c0 \ + --hash=sha256:a3868c367cab3cf09e6bb68405e31f4342fc4a4905ccc3e3fdde133d520206c0 + # via + # -r requirements.txt + # cdk-lambda-powertools-python-layer +cattrs==22.1.0 \ + 
--hash=sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6 \ + --hash=sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364 + # via + # -r requirements.txt + # jsii +cdk-lambda-powertools-python-layer==2.0.49 \ + --hash=sha256:8055fc691539f16e22a40e3d3df9c3f59fb28012437b08c47c639aefb001f1b2 \ + --hash=sha256:9b0a7b7344f9ccb486564af728cefeac743687bfb131631e6d9171a55800dbac + # via -r requirements.txt +constructs==10.1.49 \ + --hash=sha256:3abba3e9e06f35f0a5e15c10e2e080f4f1fd449fe008fd650f2a5fcc6f729cc3 \ + --hash=sha256:89f0984b64bb987b428c3bfec51e2d5524b8605c9e2c2043491de9e3b007a3cf + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer +exceptiongroup==1.0.0rc8 \ + --hash=sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a \ + --hash=sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035 + # via + # -r requirements.txt + # cattrs +jsii==1.62.0 \ + --hash=sha256:c22ac7373260fbabdb012faba717a8a4dbd933120cee373905030fd66956a65a \ + --hash=sha256:d124b0f350fd206e0488d3bb83dc58832f11e64fc728fd3a10096872d8a3a938 + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer + # constructs +publication==0.0.3 \ + --hash=sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6 \ + --hash=sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4 + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer + # constructs +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via + # -r requirements.txt + # jsii +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # -r requirements.txt + # python-dateutil +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via + # -r requirements.txt + # jsii diff --git a/poetry.lock b/poetry.lock index 77300c37213..a1f72d121a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,17 +8,30 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.2.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = 
["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "aws-cdk-lib" +version = "2.23.0" +description = "Version 2 of the AWS Cloud Development Kit library" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" [[package]] name = "aws-xray-sdk" @@ -106,6 +119,33 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.13.8)"] +[[package]] +name = "cattrs" +version = "1.0.0" +description = "Composable complex class support for attrs." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.3" + +[package.extras] +dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx", "pytest", "hypothesis", "pendulum"] + +[[package]] +name = "cattrs" +version = "22.1.0" +description = "Composable complex class support for attrs and dataclasses." +category = "dev" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +attrs = ">=20" +exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} +typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} + [[package]] name = "certifi" version = "2021.10.8" @@ -145,6 +185,18 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "constructs" +version = "10.1.1" +description = "A programming model for software-defined state" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "coverage" version = "6.2" @@ -167,6 +219,14 @@ category = "main" optional = false python-versions = ">=3.6, <3.7" +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + [[package]] name = "dnspython" version = "2.1.0" @@ -202,9 +262,31 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "exceptiongroup" +version = "1.0.0rc8" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = ["pre-commit"] + [[package]] name = "fastjsonschema" -version = "2.15.3" +version = "2.16.1" description = "Fastest Python implementation of JSON schema" category = "main" optional = false @@ -408,6 +490,21 @@ zipp = ">=0.5" docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = 
["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + [[package]] name = "iniconfig" version = "1.1.1" @@ -452,6 +549,24 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "jsii" +version = "1.57.0" +description = "Python client for jsii runtime" +category = "dev" +optional = false +python-versions = "~=3.6" + +[package.dependencies] +attrs = ">=21.2,<22.0" +cattrs = [ + {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, + {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, +] +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +python-dateutil = "*" +typing-extensions = ">=3.7,<5.0" + [[package]] name = "mako" version = "1.1.6" @@ -615,8 +730,8 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-appconfig" -version = "1.24.0" -description = "Type annotations for boto3.AppConfig 1.24.0 service generated with mypy-boto3-builder 7.6.1" +version = "1.24.29" +description = "Type annotations for boto3.AppConfig 1.24.29 service generated with mypy-boto3-builder 7.7.3" category = "dev" optional = false python-versions = ">=3.6" @@ -624,10 +739,21 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-cloudwatch" +version = "1.24.0" +description = "Type annotations for boto3.CloudWatch 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.12" -description = "Type annotations for boto3.DynamoDB 1.24.12 service generated with mypy-boto3-builder 7.7.1" +version = "1.24.27" +description = "Type annotations for boto3.DynamoDB 1.24.27 service generated with mypy-boto3-builder 7.6.0" category = "dev" optional = false python-versions = ">=3.6" @@ -635,6 +761,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-lambda" +version = "1.24.0" +description = "Type annotations for boto3.Lambda 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-secretsmanager" version = "1.24.11.post3" @@ -657,6 +794,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-xray" +version = "1.24.0" +description = "Type annotations for boto3.XRay 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -731,6 +879,14 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "publication" +version = "0.0.3" +description = "Publication helps you maintain public-api-friendly modules by preventing unintentional access to private implementation details via introspection." 
+category = "dev" +optional = false +python-versions = "*" + [[package]] name = "py" version = "1.11.0" @@ -877,6 +1033,18 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-forked" +version = "1.4.0" +description = "run tests in isolated forked subprocesses" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + [[package]] name = "pytest-mock" version = "3.6.1" @@ -891,6 +1059,24 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "tox", "pytest-asyncio"] +[[package]] +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -952,6 +1138,18 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "ruamel.yaml" version = "0.17.17" @@ -1125,7 +1323,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "3f3f95ab1a8cf1351639687362e20bd26b784330d309f49a2f5a307682fe5879" +content-hash = "160d80d73190474a97b16859405adf04618bc26564b9b927e6c31ab706f247d1" [metadata.files] atomicwrites = [ @@ -1133,8 +1331,12 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +aws-cdk-lib = [ + {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, + {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, ] aws-xray-sdk = [ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, @@ -1156,6 +1358,12 @@ botocore = [ {file = "botocore-1.24.44-py3-none-any.whl", hash = "sha256:ed07772c924984e5b3c1005f7ba4600cebd4169c23307cf6e92cccadf0b5d2e7"}, {file = "botocore-1.24.44.tar.gz", hash = "sha256:0030a11eac972be46859263820885ba650503622c5acfe58966f482d42cc538d"}, ] +cattrs = [ + {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, + {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, + {file = "cattrs-22.1.0-py3-none-any.whl", hash = 
"sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, + {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, +] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, @@ -1172,6 +1380,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +constructs = [ + {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"}, + {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"}, +] coverage = [ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, @@ -1225,6 +1437,10 @@ dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] dnspython = [ {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, @@ -1236,9 +1452,17 @@ email-validator = [ eradicate = [ {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, ] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, + {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, +] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] fastjsonschema = [ - {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, - {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, + {file = "fastjsonschema-2.16.1-py3-none-any.whl", hash = "sha256:2f7158c4de792555753d6c2277d6a2af2d406dfd97aeca21d17173561ede4fe6"}, + {file = "fastjsonschema-2.16.1.tar.gz", hash = "sha256:d6fa3ffbe719768d70e298b9fb847484e2bdfdb7241ed052b8d57a9294a8c334"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ 
-1302,6 +1526,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1318,6 +1546,10 @@ jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] +jsii = [ + {file = "jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"}, + {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"}, +] mako = [ {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, @@ -1454,12 +1686,20 @@ mypy = [ {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"}, ] mypy-boto3-appconfig = [ - {file = "mypy-boto3-appconfig-1.24.0.tar.gz", hash = "sha256:3bb38c2819b78c72fd9c031058edf5e547ad549d58e052928a4f397823a51dbd"}, - {file = "mypy_boto3_appconfig-1.24.0-py3-none-any.whl", hash = "sha256:ca53b0b9606f13257dd0feb800d36531f2eba54f46bd9db7765f69baf9583485"}, + {file = "mypy-boto3-appconfig-1.24.29.tar.gz", hash = "sha256:10583d309a9db99babfbe85d3b6467b49b3509a57e4f8771da239f6d5cb3731b"}, + {file = "mypy_boto3_appconfig-1.24.29-py3-none-any.whl", hash = "sha256:e9d9e2e25fdd82bffc6262dc184edf5d0d3d9fbb0ab35e597a1ea57ba13d4d80"}, +] +mypy-boto3-cloudwatch = [ + {file = "mypy-boto3-cloudwatch-1.24.0.tar.gz", hash = "sha256:d19cd71aa07ecc69c1e2f9691af6a81bf1d65267ad4be1f9486bf683370727a5"}, + {file = "mypy_boto3_cloudwatch-1.24.0-py3-none-any.whl", hash = "sha256:82dac27b1dd0ad8969fedf874ea4713b36d37fe04229f7fdaaecf4addb59d4bd"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.12.tar.gz", hash = "sha256:4fc6f0f84988ae3d307a43ec31930483828b199f1179cb801238c16cd8be5901"}, - {file = "mypy_boto3_dynamodb-1.24.12-py3-none-any.whl", hash = "sha256:7ad9aa9c23a9f90d0aa0018df3a975e6c1da32f76c11aef60bf1a49cfca840cc"}, + {file = "mypy-boto3-dynamodb-1.24.27.tar.gz", hash = "sha256:c982d24f9b2525a70f408ad40eff69660d56928217597d88860b60436b25efbf"}, + {file = "mypy_boto3_dynamodb-1.24.27-py3-none-any.whl", hash = "sha256:63f7d9755fc5cf2e637edf8d33024050152a53013d1a102716ae0d534563ef07"}, +] +mypy-boto3-lambda = [ + {file = "mypy-boto3-lambda-1.24.0.tar.gz", hash = "sha256:ab425f941d0d50a2b8a20cc13cebe03c3097b122259bf00e7b295d284814bd6f"}, + {file = "mypy_boto3_lambda-1.24.0-py3-none-any.whl", hash = "sha256:a286a464513adf50847bda8573f2dc7adc348234827d1ac0200e610ee9a09b80"}, ] 
 mypy-boto3-secretsmanager = [
     {file = "mypy-boto3-secretsmanager-1.24.11.post3.tar.gz", hash = "sha256:f153b3f5ff2c65664a906fb2c97a6598a57da9f1da77679dbaf541051dcff36e"},
@@ -1469,6 +1709,10 @@ mypy-boto3-ssm = [
     {file = "mypy-boto3-ssm-1.24.0.tar.gz", hash = "sha256:bab58398947c3627a4e7610cd0f57b525c12fd1d0a6bb862400b6af0a4e684fc"},
     {file = "mypy_boto3_ssm-1.24.0-py3-none-any.whl", hash = "sha256:1f17055abb8d70f25e6ece2ef4c0dc74d585744c25a3a833c2985d74165ac0c6"},
 ]
+mypy-boto3-xray = [
+    {file = "mypy-boto3-xray-1.24.0.tar.gz", hash = "sha256:fbe211b7601684a2d4defa2f959286f1441027c15044c0c0013257e22307778a"},
+    {file = "mypy_boto3_xray-1.24.0-py3-none-any.whl", hash = "sha256:6b9bc96e7924215fe833fe0d732d5e3ce98f7739b373432b9735a9905f867171"},
+]
 mypy-extensions = [
     {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
     {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
@@ -1497,6 +1741,10 @@ pluggy = [
     {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
     {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
 ]
+publication = [
+    {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"},
+    {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"},
+]
 py = [
     {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
@@ -1577,10 +1825,18 @@ pytest-cov = [
     {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
     {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
 ]
+pytest-forked = [
+    {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"},
+    {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"},
+]
 pytest-mock = [
     {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"},
     {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"},
 ]
+pytest-xdist = [
+    {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"},
+    {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"},
+]
 python-dateutil = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -1628,6 +1884,10 @@ requests = [
     {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
     {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
 ]
+retry = [
"retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] "ruamel.yaml" = [ {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, diff --git a/pyproject.toml b/pyproject.toml index b665a760889..f10a2a45234 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ fastjsonschema = "^2.14.5" boto3 = "^1.18" pydantic = {version = "^1.8.2", optional = true } email-validator = {version = "*", optional = true } +mypy-boto3-cloudwatch = "^1.24.0" +mypy-boto3-lambda = "^1.24.0" +mypy-boto3-xray = "^1.24.0" [tool.poetry.dev-dependencies] # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. @@ -57,8 +60,11 @@ mypy = "^0.961" mkdocs-material = "^8.2.7" mypy-boto3-secretsmanager = "^1.24.11" mypy-boto3-ssm = "^1.24.0" -mypy-boto3-appconfig = "^1.24.0" -mypy-boto3-dynamodb = "^1.24.12" +mypy-boto3-appconfig = "^1.24.29" +mypy-boto3-dynamodb = "^1.24.27" +retry = "^0.9.2" +pytest-xdist = "^2.5.0" +aws-cdk-lib = "^2.23.0" pytest-benchmark = "^3.4.1" @@ -132,7 +138,9 @@ exclude = ''' minversion = "6.0" addopts = "-ra -vv" testpaths = "./tests" -markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" +markers = [ + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", +] [build-system] requires = ["poetry>=0.12"] diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py new file mode 100644 index 00000000000..4be6a26c6a6 --- /dev/null +++ b/tests/e2e/conftest.py @@ -0,0 +1,65 @@ +import datetime +import sys +import uuid +from dataclasses import dataclass + +import boto3 + +# We only need typing_extensions for python versions <3.8 +if sys.version_info >= (3, 8): + from typing import TypedDict +else: + from typing_extensions import TypedDict + +from typing import Dict, Generator, Optional + +import pytest +from e2e.utils import helpers, infrastructure + + +class LambdaConfig(TypedDict): + parameters: dict + environment_variables: Dict[str, str] + + +@dataclass +class InfrastructureOutput: + arns: Dict[str, str] + execution_time: datetime.datetime + + def get_lambda_arns(self) -> Dict[str, str]: + return self.arns + + def get_lambda_function_arn(self, cf_output_name: str) -> Optional[str]: + return self.arns.get(cf_output_name) + + def get_lambda_function_name(self, cf_output_name: str) -> Optional[str]: + lambda_arn = self.get_lambda_function_arn(cf_output_name=cf_output_name) + return lambda_arn.split(":")[-1] if lambda_arn else None + + def get_lambda_execution_time(self) -> datetime.datetime: + return self.execution_time + + def get_lambda_execution_time_timestamp(self) -> int: + return int(self.execution_time.timestamp() * 1000) + + +@pytest.fixture(scope="module") +def create_infrastructure(config, request) -> Generator[Dict[str, str], None, None]: + stack_name = f"test-lambda-{uuid.uuid4()}" + test_dir = request.fspath.dirname + handlers_dir = f"{test_dir}/handlers/" + + infra = infrastructure.Infrastructure(stack_name=stack_name, handlers_dir=handlers_dir, config=config) + yield infra.deploy(Stack=infrastructure.InfrastructureStack) + 
+    infra.delete()
+
+
+@pytest.fixture(scope="module")
+def execute_lambda(create_infrastructure) -> InfrastructureOutput:
+    execution_time = datetime.datetime.utcnow()
+    session = boto3.Session()
+    client = session.client("lambda")
+    for _, arn in create_infrastructure.items():
+        helpers.trigger_lambda(lambda_arn=arn, client=client)
+    return InfrastructureOutput(arns=create_infrastructure, execution_time=execution_time)
diff --git a/tests/e2e/logger/__init__.py b/tests/e2e/logger/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py
new file mode 100644
index 00000000000..34d7fb4678a
--- /dev/null
+++ b/tests/e2e/logger/handlers/basic_handler.py
@@ -0,0 +1,17 @@
+import os
+
+from aws_lambda_powertools import Logger
+
+logger = Logger()
+
+MESSAGE = os.environ["MESSAGE"]
+ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"]
+
+
+@logger.inject_lambda_context(log_event=True)
+def lambda_handler(event, context):
+    logger.debug(MESSAGE)
+    logger.info(MESSAGE)
+    logger.append_keys(**{ADDITIONAL_KEY: "test"})
+    logger.info(MESSAGE)
+    return "success"
diff --git a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py
new file mode 100644
index 00000000000..1347ba98d81
--- /dev/null
+++ b/tests/e2e/logger/handlers/no_context_handler.py
@@ -0,0 +1,14 @@
+import os
+
+from aws_lambda_powertools import Logger
+
+logger = Logger()
+
+MESSAGE = os.environ["MESSAGE"]
+ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"]
+
+
+def lambda_handler(event, context):
+    logger.info(MESSAGE)
+    logger.append_keys(**{ADDITIONAL_KEY: "test"})
+    return "success"
diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py
new file mode 100644
index 00000000000..ea27b93740b
--- /dev/null
+++ b/tests/e2e/logger/test_logger.py
@@ -0,0 +1,142 @@
+import boto3
+import pytest
+from e2e import conftest
+from e2e.utils import helpers
+
+
+@pytest.fixture(scope="module")
+def config() -> conftest.LambdaConfig:
+    return {
+        "parameters": {},
+        "environment_variables": {
+            "MESSAGE": "logger message test",
+            "LOG_LEVEL": "INFO",
+            "ADDITIONAL_KEY": "extra_info",
+        },
+    }
+
+
+def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
+    # GIVEN
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert any(
+        log.message == config["environment_variables"]["MESSAGE"]
+        and log.level == config["environment_variables"]["LOG_LEVEL"]
+        for log in filtered_logs
+    )
+
+
+def test_basic_lambda_no_debug_logs_visible(
+    execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig
+):
+    # GIVEN
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert not any(
+        log.message == config["environment_variables"]["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs
+    )
+
+
+def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput):
+    # GIVEN
+    required_keys = (
+        "xray_trace_id",
+        "function_request_id",
+        "function_arn",
+        "function_memory_size",
+        "function_name",
+        "cold_start",
+    )
+
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys)
+
+
+def test_basic_lambda_additional_key_persistence_basic_lambda(
+    execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig
+):
+    # GIVEN
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert any(
+        log.extra_info
+        and log.message == config["environment_variables"]["MESSAGE"]
+        and log.level == config["environment_variables"]["LOG_LEVEL"]
+        for log in filtered_logs
+    )
+
+
+def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput):
+
+    # GIVEN
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert any(log.message == {} for log in filtered_logs)
+
+
+def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput):
+
+    # GIVEN
+    required_missing_keys = (
+        "function_request_id",
+        "function_arn",
+        "function_memory_size",
+        "function_name",
+        "cold_start",
+    )
+
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys)
+
+
+def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput):
+
+    # GIVEN
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn")
+    timestamp = execute_lambda.get_lambda_execution_time_timestamp()
+    cw_client = boto3.client("logs")
+
+    # WHEN
+    filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client)
+
+    # THEN
+    assert not any(log.message == {} for log in filtered_logs)
diff --git a/tests/e2e/metrics/__init__.py b/tests/e2e/metrics/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/e2e/metrics/handlers/basic_handler.py b/tests/e2e/metrics/handlers/basic_handler.py
new file mode 100644
index 00000000000..dd2f486d980
--- /dev/null
+++ b/tests/e2e/metrics/handlers/basic_handler.py
@@ -0,0 +1,15 @@
+import os
+
+from aws_lambda_powertools import Metrics
+from aws_lambda_powertools.metrics import MetricUnit
+
+METRIC_NAME = os.environ["METRIC_NAME"]
+
+metrics = Metrics()
+
+
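+# log_metrics serializes buffered metrics to stdout in CloudWatch EMF format when the handler returns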
+@metrics.log_metrics
+def lambda_handler(event, context):
+    metrics.add_metric(name=METRIC_NAME, unit=MetricUnit.Count, value=1)
+    return "success"
diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py
new file mode 100644
index 00000000000..7d3aa7efa61
--- /dev/null
+++ b/tests/e2e/metrics/test_metrics.py
@@ -0,0 +1,40 @@
+import datetime
+import uuid
+
+import boto3
+import pytest
+from e2e import conftest
+from e2e.utils import helpers
+
+
+@pytest.fixture(scope="module")
+def config() -> conftest.LambdaConfig:
+    return {
+        "parameters": {},
+        "environment_variables": {
+            "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric",
+            "POWERTOOLS_SERVICE_NAME": "test-powertools-service",
+            "METRIC_NAME": f"business-metric-{str(uuid.uuid4()).replace('-','_')}",
+        },
+    }
+
+
+def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
+    # GIVEN
+    start_date = execute_lambda.get_lambda_execution_time()
+    end_date = start_date + datetime.timedelta(minutes=5)
+
+    # WHEN
+    metrics = helpers.get_metrics(
+        start_date=start_date,
+        end_date=end_date,
+        namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"],
+        metric_name=config["environment_variables"]["METRIC_NAME"],
+        service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"],
+        cw_client=boto3.client(service_name="cloudwatch"),
+    )
+
+    # THEN
+    assert metrics.get("Timestamps") and len(metrics.get("Timestamps")) == 1
+    assert metrics.get("Values") and len(metrics.get("Values")) == 1
+    assert metrics.get("Values") and metrics.get("Values")[0] == 1
diff --git a/tests/e2e/tracer/__init__.py b/tests/e2e/tracer/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py
new file mode 100644
index 00000000000..d074b30796f
--- /dev/null
+++ b/tests/e2e/tracer/handlers/basic_handler.py
@@ -0,0 +1,25 @@
+import asyncio
+import os
+
+from aws_lambda_powertools import Tracer
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer(service="e2e-tests-app")
+
+ANNOTATION_KEY = os.environ["ANNOTATION_KEY"]
+ANNOTATION_VALUE = os.environ["ANNOTATION_VALUE"]
+ANNOTATION_ASYNC_VALUE = os.environ["ANNOTATION_ASYNC_VALUE"]
+
+
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext):
+    tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
+    tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
+    return asyncio.run(collect_payment())
+
+
+@tracer.capture_method
+async def collect_payment() -> str:
+    tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
+    tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
+    return "success"
diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py
new file mode 100644
index 00000000000..c2af4386749
--- /dev/null
+++ b/tests/e2e/tracer/test_tracer.py
@@ -0,0 +1,51 @@
+import datetime
+import uuid
+
+import boto3
+import pytest
+from e2e import conftest
+from e2e.utils import helpers
+
+
+@pytest.fixture(scope="module")
+def config() -> conftest.LambdaConfig:
+    return {
+        "parameters": {"tracing": "ACTIVE"},
+        "environment_variables": {
+            "ANNOTATION_KEY": f"e2e-tracer-{str(uuid.uuid4()).replace('-','_')}",
+            "ANNOTATION_VALUE": "stored",
+            "ANNOTATION_ASYNC_VALUE": "payments",
+        },
+    }
+
+
+def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
+    # GIVEN
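+    # Query X-Ray for traces in a 5 minute window after invocation, filtered by this function's service name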
+    lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn")
+    start_date = execute_lambda.get_lambda_execution_time()
+    end_date = start_date + datetime.timedelta(minutes=5)
+    trace_filter_expression = f'service("{lambda_name}")'
+
+    # WHEN
+    trace = helpers.get_traces(
+        start_date=start_date,
+        end_date=end_date,
+        filter_expression=trace_filter_expression,
+        xray_client=boto3.client("xray"),
+    )
+
+    # THEN
+    info = helpers.find_trace_additional_info(trace=trace)
+    handler_trace_segment = [trace_segment for trace_segment in info if trace_segment.name == "## lambda_handler"][0]
+    collect_payment_trace_segment = [
+        trace_segment for trace_segment in info if trace_segment.name == "## collect_payment"
+    ][0]
+
+    annotation_key = config["environment_variables"]["ANNOTATION_KEY"]
+    expected_value = config["environment_variables"]["ANNOTATION_VALUE"]
+    expected_async_value = config["environment_variables"]["ANNOTATION_ASYNC_VALUE"]
+
+    assert handler_trace_segment.annotations["Service"] == "e2e-tests-app"
+    assert handler_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_value
+    assert collect_payment_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_async_value
diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/utils/Dockerfile
new file mode 100644
index 00000000000..eccfe2c6dfd
--- /dev/null
+++ b/tests/e2e/utils/Dockerfile
@@ -0,0 +1,16 @@
+# Image used by CDK's LayerVersion construct to create a Lambda Layer with the
+# Powertools library code.
+# The correct AWS SAM build image, based on the function's runtime, is passed
+# as a build arg. The default allows running `docker build .` directly when testing.
+ARG IMAGE=public.ecr.aws/sam/build-python3.7
+FROM $IMAGE
+
+ARG PIP_INDEX_URL
+ARG PIP_EXTRA_INDEX_URL
+ARG HTTPS_PROXY
+
+# Upgrade pip (required by cryptography v3.4 and above, which is a dependency of poetry)
+RUN pip install --upgrade pip
+RUN pip install pipenv poetry
+
+CMD [ "python" ]
diff --git a/tests/e2e/utils/__init__.py b/tests/e2e/utils/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py
new file mode 100644
index 00000000000..3f88f44f933
--- /dev/null
+++ b/tests/e2e/utils/helpers.py
@@ -0,0 +1,134 @@
+import json
+from datetime import datetime
+from functools import lru_cache
+from typing import Dict, List, Optional, Union
+
+from mypy_boto3_cloudwatch import type_defs
+from mypy_boto3_cloudwatch.client import CloudWatchClient
+from mypy_boto3_lambda.client import LambdaClient
+from mypy_boto3_xray.client import XRayClient
+from pydantic import BaseModel
+from retry import retry
+
+
+# Helper methods & classes
+class Log(BaseModel):
+    level: str
+    location: str
+    message: Union[dict, str]
+    timestamp: str
+    service: str
+    cold_start: Optional[bool]
+    function_name: Optional[str]
+    function_memory_size: Optional[str]
+    function_arn: Optional[str]
+    function_request_id: Optional[str]
+    xray_trace_id: Optional[str]
+    extra_info: Optional[str]
+
+
+class TraceSegment(BaseModel):
+    name: str
+    metadata: Dict = {}
+    annotations: Dict = {}
+
+
+def trigger_lambda(lambda_arn: str, client: LambdaClient):
+    response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse")
+    return response
+
+
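+# CloudWatch Logs, CloudWatch metrics and X-Ray traces arrive with a delay, so the
+# fetchers below retry until data shows up and memoize successful responses with
+# lru_cache so repeated assertions against the same Lambda reuse them.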
+@lru_cache(maxsize=10, typed=False)
+@retry(ValueError, delay=1, jitter=1, tries=20)
+def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]:
+    response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time)
+    if not response["events"]:
+        raise ValueError("Empty response from Cloudwatch Logs. Retrying...")
+    filtered_logs = []
+    for event in response["events"]:
+        try:
+            message = Log(**json.loads(event["message"]))
+        except json.decoder.JSONDecodeError:
+            continue
+        filtered_logs.append(message)
+
+    return filtered_logs
+
+
+@lru_cache(maxsize=10, typed=False)
+@retry(ValueError, delay=1, jitter=1, tries=20)
+def get_metrics(
+    namespace: str,
+    cw_client: CloudWatchClient,
+    start_date: datetime,
+    metric_name: str,
+    service_name: str,
+    end_date: Optional[datetime] = None,
+) -> type_defs.MetricDataResultTypeDef:
+    response = cw_client.get_metric_data(
+        MetricDataQueries=[
+            {
+                "Id": "m1",
+                "Expression": f'SELECT MAX("{metric_name}") from SCHEMA("{namespace}",service) \
+                where service=\'{service_name}\'',
+                "ReturnData": True,
+                "Period": 600,
+            },
+        ],
+        StartTime=start_date,
+        EndTime=end_date if end_date else datetime.utcnow(),
+    )
+    result = response["MetricDataResults"][0]
+    if not result["Values"]:
+        raise ValueError("Empty response from Cloudwatch. Retrying...")
+    return result
+
+
+@retry(ValueError, delay=1, jitter=1, tries=10)
+def get_traces(filter_expression: str, xray_client: XRayClient, start_date: datetime, end_date: datetime) -> Dict:
+    paginator = xray_client.get_paginator("get_trace_summaries")
+    response_iterator = paginator.paginate(
+        StartTime=start_date,
+        EndTime=end_date,
+        TimeRangeType="Event",
+        Sampling=False,
+        FilterExpression=filter_expression,
+    )
+
+    traces = [trace["TraceSummaries"][0]["Id"] for trace in response_iterator if trace["TraceSummaries"]]
+    if not traces:
+        raise ValueError("Empty response from X-RAY. Retrying...")
Repeating...") + + trace_details = xray_client.batch_get_traces( + TraceIds=traces, + ) + + return trace_details + + +def find_trace_additional_info(trace: Dict) -> List[TraceSegment]: + """Find all trace annotations and metadata and return them to the caller""" + info = [] + for segment in trace["Traces"][0]["Segments"]: + document = json.loads(segment["Document"]) + if document["origin"] == "AWS::Lambda::Function": + for subsegment in document["subsegments"]: + if subsegment["name"] == "Invocation": + find_meta(segment=subsegment, result=info) + return info + + +def find_meta(segment: dict, result: List): + for x_subsegment in segment["subsegments"]: + result.append( + TraceSegment( + name=x_subsegment["name"], + metadata=x_subsegment.get("metadata", {}), + annotations=x_subsegment.get("annotations", {}), + ) + ) + if x_subsegment.get("subsegments"): + find_meta(segment=x_subsegment, result=result) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py new file mode 100644 index 00000000000..001ae0e6346 --- /dev/null +++ b/tests/e2e/utils/infrastructure.py @@ -0,0 +1,212 @@ +import io +import json +import os +import sys +import zipfile +from abc import ABC, abstractmethod +from enum import Enum +from pathlib import Path +from typing import Dict, List, Tuple, Type + +import boto3 +import yaml +from aws_cdk import App, AssetStaging, BundlingOptions, CfnOutput, DockerImage, RemovalPolicy, Stack, aws_logs +from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing + +PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" + + +class PythonVersion(Enum): + V37 = {"runtime": Runtime.PYTHON_3_7, "image": Runtime.PYTHON_3_7.bundling_image.image} + V38 = {"runtime": Runtime.PYTHON_3_8, "image": Runtime.PYTHON_3_8.bundling_image.image} + V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} + + +class BaseInfrastructureStack(ABC): + @abstractmethod + def synthesize() -> Tuple[dict, str]: + ... + + @abstractmethod + def __call__() -> Tuple[dict, str]: + ... 
+class InfrastructureStack(BaseInfrastructureStack):
+    def __init__(self, handlers_dir: str, stack_name: str, config: dict) -> None:
+        self.stack_name = stack_name
+        self.handlers_dir = handlers_dir
+        self.config = config
+
+    def _create_layer(self, stack: Stack):
+        output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python")
+        input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools")
+        powertools_layer = LayerVersion(
+            stack,
+            "aws-lambda-powertools",
+            layer_version_name="aws-lambda-powertools",
+            compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]],
+            code=Code.from_asset(
+                path=".",
+                bundling=BundlingOptions(
+                    image=DockerImage.from_build(
+                        str(Path(__file__).parent),
+                        build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]},
+                    ),
+                    command=[
+                        "bash",
+                        "-c",
+                        rf"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\
+                        pip install -r /tmp/requirements.txt -t {output_dir} &&\
+                        cp -R {input_dir} {output_dir}",
+                    ],
+                ),
+            ),
+        )
+        return powertools_layer
+
+    def _find_handlers(self, directory: str) -> List:
+        for root, _, files in os.walk(directory):
+            return [os.path.join(root, filename) for filename in files if filename.endswith(".py")]
+
+    def synthesize(self, handlers: List[str]) -> Tuple[dict, str, str]:
+        integration_test_app = App()
+        stack = Stack(integration_test_app, self.stack_name)
+        powertools_layer = self._create_layer(stack)
+        code = Code.from_asset(self.handlers_dir)
+
+        for filename_path in handlers:
+            filename = Path(filename_path).stem
+            function_python = Function(
+                stack,
+                f"{filename}-lambda",
+                runtime=PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"],
+                code=code,
+                handler=f"{filename}.lambda_handler",
+                layers=[powertools_layer],
+                environment=self.config.get("environment_variables"),
+                tracing=Tracing.ACTIVE
+                if self.config.get("parameters", {}).get("tracing") == "ACTIVE"
+                else Tracing.DISABLED,
+            )
+
+            aws_logs.LogGroup(
+                stack,
+                f"{filename}-lg",
+                log_group_name=f"/aws/lambda/{function_python.function_name}",
+                retention=aws_logs.RetentionDays.ONE_DAY,
+                removal_policy=RemovalPolicy.DESTROY,
+            )
+            CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn)
+        cloud_assembly = integration_test_app.synth()
+        cf_template = cloud_assembly.get_stack_by_name(self.stack_name).template
+        cloud_assembly_directory = cloud_assembly.directory
+        cloud_assembly_assets_manifest_path = cloud_assembly.get_stack_by_name(self.stack_name).dependencies[0].file
+
+        return (cf_template, cloud_assembly_directory, cloud_assembly_assets_manifest_path)
+
+    def __call__(self) -> Tuple[dict, str]:
+        handlers = self._find_handlers(directory=self.handlers_dir)
+        return self.synthesize(handlers=handlers)
+
+
+class Infrastructure:
+    def __init__(self, stack_name: str, handlers_dir: str, config: dict) -> None:
+        session = boto3.Session()
+        self.s3_client = session.client("s3")
+        self.lambda_client = session.client("lambda")
+        self.cf_client = session.client("cloudformation")
+        self.s3_resource = session.resource("s3")
+        self.account_id = session.client("sts").get_caller_identity()["Account"]
+        self.region = session.region_name
+        self.stack_name = stack_name
+        self.handlers_dir = handlers_dir
+        self.config = config
+
+    def deploy(self, Stack: Type[BaseInfrastructureStack]) -> Dict[str, str]:
+
+        stack = Stack(handlers_dir=self.handlers_dir, stack_name=self.stack_name, config=self.config)
+        template, asset_root_dir, asset_manifest_file = stack()
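+        # Deploy flow: synthesize the CDK app, upload its assets, then create the CloudFormation stack directly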
+        self._upload_assets(asset_root_dir, asset_manifest_file)
+
+        response = self._deploy_stack(self.stack_name, template)
+
+        return self._transform_output(response["Stacks"][0]["Outputs"])
+
+    def delete(self):
+        self.cf_client.delete_stack(StackName=self.stack_name)
+
+    def _upload_assets(self, asset_root_dir: str, asset_manifest_file: str):
+        """
+        This method is a drop-in replacement for the S3 upload step of the cdk-assets package:
+        https://www.npmjs.com/package/cdk-assets.
+        We use a custom solution to avoid a dependency on the Node.js ecosystem.
+        We follow the same design as cdk-assets:
+        https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md.
+        """
+
+        assets = self._find_assets(asset_manifest_file, self.account_id, self.region)
+
+        for s3_key, config in assets.items():
+            print(config)
+            s3_bucket = self.s3_resource.Bucket(config["bucket_name"])
+
+            if config["asset_packaging"] != "zip":
+                print("Asset is not a zip file. Skipping upload")
+                continue
+
+            if bool(list(s3_bucket.objects.filter(Prefix=s3_key))):
+                print("object exists, skipping")
+                continue
+
+            buf = io.BytesIO()
+            asset_dir = f"{asset_root_dir}/{config['asset_path']}"
+            os.chdir(asset_dir)
+            asset_files = self._find_files(directory=".")
+            with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf:
+                for asset_file in asset_files:
+                    zf.write(os.path.join(asset_file))
+            buf.seek(0)
+            self.s3_client.upload_fileobj(Fileobj=buf, Bucket=config["bucket_name"], Key=s3_key)
+
+    def _find_files(self, directory: str) -> List:
+        file_paths = []
+        for root, _, files in os.walk(directory):
+            for filename in files:
+                file_paths.append(os.path.join(root, filename))
+        return file_paths
+
+    def _deploy_stack(self, stack_name: str, template: dict):
+        response = self.cf_client.create_stack(
+            StackName=stack_name,
+            TemplateBody=yaml.dump(template),
+            TimeoutInMinutes=10,
+            OnFailure="ROLLBACK",
+            Capabilities=["CAPABILITY_IAM"],
+        )
+        waiter = self.cf_client.get_waiter("stack_create_complete")
+        waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50})
+        response = self.cf_client.describe_stacks(StackName=stack_name)
+        return response
+
+    def _find_assets(self, asset_template: str, account_id: str, region: str):
+        assets = {}
+        with open(asset_template, mode="r") as template:
+            for _, config in json.loads(template.read())["files"].items():
+                asset_path = config["source"]["path"]
+                asset_packaging = config["source"]["packaging"]
+                bucket_name = config["destinations"]["current_account-current_region"]["bucketName"]
+                object_key = config["destinations"]["current_account-current_region"]["objectKey"]
+
+                assets[object_key] = {
+                    "bucket_name": bucket_name.replace("${AWS::AccountId}", account_id).replace(
+                        "${AWS::Region}", region
+                    ),
+                    "asset_path": asset_path,
+                    "asset_packaging": asset_packaging,
+                }
+
+        return assets
+
+    def _transform_output(self, outputs: dict):
+        return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]}
diff --git a/tests/e2e/utils/py.typed b/tests/e2e/utils/py.typed
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/functional/parser/test_sns.py b/tests/functional/parser/test_sns.py
index 81158a4419e..b0d9ff69a9b 100644
--- a/tests/functional/parser/test_sns.py
+++ b/tests/functional/parser/test_sns.py
@@ -1,3 +1,4 @@
+import json
 from typing import Any, List
 
 import pytest
@@ -103,3 +104,29 @@ def handle_sns_sqs_json_body(event: List[MySnsBusiness], _: LambdaContext):
 def test_handle_sns_sqs_trigger_event_json_body():  # noqa: F811
     event_dict = load_event("snsSqsEvent.json")
     handle_sns_sqs_json_body(event_dict, LambdaContext())
+
+
+def test_handle_sns_sqs_trigger_event_json_body_missing_signing_cert_url():
+    # GIVEN an event tampered with to remove SigningCertURL
+    event_dict = load_event("snsSqsEvent.json")
+    payload = json.loads(event_dict["Records"][0]["body"])
+    payload.pop("SigningCertURL")
+    event_dict["Records"][0]["body"] = json.dumps(payload)
+
+    # WHEN parsing the payload
+    # THEN a ValidationError is raised
+    with pytest.raises(ValidationError):
+        handle_sns_sqs_json_body(event_dict, LambdaContext())
+
+
+def test_handle_sns_sqs_trigger_event_json_body_missing_unsubscribe_url():
+    # GIVEN an event tampered with to remove UnsubscribeURL
+    event_dict = load_event("snsSqsEvent.json")
+    payload = json.loads(event_dict["Records"][0]["body"])
+    payload.pop("UnsubscribeURL")
+    event_dict["Records"][0]["body"] = json.dumps(payload)
+
+    # WHEN parsing the payload
+    # THEN a ValidationError is raised
+    with pytest.raises(ValidationError):
+        handle_sns_sqs_json_body(event_dict, LambdaContext())
diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py
index c76faa7cde5..c8b3dc61755 100644
--- a/tests/functional/test_logger.py
+++ b/tests/functional/test_logger.py
@@ -625,6 +625,40 @@ def handler(event, context):
     assert all(k in second_log for k in lambda_context_keys)
 
 
+def test_logger_custom_formatter_has_standard_and_custom_keys(stdout, service_name, lambda_context):
+    class CustomFormatter(LambdaPowertoolsFormatter):
+        ...
+
+    # GIVEN a Logger is initialized with a custom formatter
+    logger = Logger(service=service_name, stream=stdout, logger_formatter=CustomFormatter(), my_key="value")
+
+    # WHEN a lambda function is decorated with logger
+    @logger.inject_lambda_context
+    def handler(event, context):
+        logger.info("Hello")
+
+    handler({}, lambda_context)
+
+    standard_keys = (
+        "level",
+        "location",
+        "message",
+        "timestamp",
+        "service",
+        "cold_start",
+        "function_name",
+        "function_memory_size",
+        "function_arn",
+        "function_request_id",
+    )
+
+    log = capture_logging_output(stdout)
+
+    # THEN all standard keys should be available
+    assert all(k in log for k in standard_keys)
+    assert "my_key" in log
+
+
 def test_logger_custom_handler(lambda_context, service_name, tmp_path):
     # GIVEN a Logger is initialized with a FileHandler
     log_file = tmp_path / "log.json"