diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index e7fbd3906b1..53e362cc779 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -38,7 +38,7 @@ body: options: - label: This feature request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/) + - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/maintenance.yml b/.github/ISSUE_TEMPLATE/maintenance.yml index 95473ac1c61..9f9798c8aed 100644 --- a/.github/ISSUE_TEMPLATE/maintenance.yml +++ b/.github/ISSUE_TEMPLATE/maintenance.yml @@ -55,7 +55,7 @@ body: options: - label: This request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/) + - label: Should this be considered in other Lambda Powertools languages? i.e. 
[Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/rfc.yml b/.github/ISSUE_TEMPLATE/rfc.yml index 6e4b0e047f8..457ae558bc6 100644 --- a/.github/ISSUE_TEMPLATE/rfc.yml +++ b/.github/ISSUE_TEMPLATE/rfc.yml @@ -93,7 +93,7 @@ body: options: - label: This feature request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/) + - label: Should this be considered in other Lambda Powertools languages? i.e. [Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/tech_debt.yml b/.github/ISSUE_TEMPLATE/tech_debt.yml index 84b5cffe189..f2933cb4ce8 100644 --- a/.github/ISSUE_TEMPLATE/tech_debt.yml +++ b/.github/ISSUE_TEMPLATE/tech_debt.yml @@ -52,7 +52,7 @@ body: options: - label: This request meets [Lambda Powertools Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets) required: true - - label: Should this be considered in other Lambda Powertools languages? i.e. [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/) + - label: Should this be considered in other Lambda Powertools languages? i.e. 
[Java](https://github.com/awslabs/aws-lambda-powertools-java/), [TypeScript](https://github.com/awslabs/aws-lambda-powertools-typescript/), and [.NET](https://github.com/awslabs/aws-lambda-powertools-dotnet/) required: false - type: markdown attributes: diff --git a/.github/workflows/dispatch_analytics.yml b/.github/workflows/dispatch_analytics.yml new file mode 100644 index 00000000000..49a276f6f61 --- /dev/null +++ b/.github/workflows/dispatch_analytics.yml @@ -0,0 +1,43 @@ +name: Dispatch analytics + +on: + workflow_dispatch: + + schedule: + - cron: '0 * * * *' + +permissions: + id-token: write + actions: read + checks: read + contents: read + deployments: read + issues: read + discussions: read + packages: read + pages: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read + +jobs: + dispatch_token: + concurrency: + group: analytics + runs-on: ubuntu-latest + environment: analytics + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef + with: + aws-region: eu-central-1 + role-to-assume: ${{ secrets.AWS_ANALYTICS_ROLE_ARN }} + + - name: Invoke Lambda function + run: | + payload=$(echo -n '{"githubToken": "${{ secrets.GITHUB_TOKEN }}"}' | base64) + aws lambda invoke \ + --function-name ${{ secrets.AWS_ANALYTICS_DISPATCHER_ARN }} \ + --payload "$payload" response.json + cat response.json diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a3f05dc15e0..8d965a4af4c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,17 +5,18 @@ name: Release # === Automated activities === # # 1. Run tests, linting, security and complexity base line -# 2. Bump package version and generate latest Changelog -# 3. Publish package to PyPi test and prod repository -# 4. Kick off SAR App pipeline to publish latest version with minimal and extra dependencies -# 5. 
Builds and publish latest changelog from tip of the branch +# 2. Bump package version, build release artifact, and generate latest Changelog +# 3. Publish package to PyPi prod repository using cached artifact +# 4. Kick off Layers pipeline to compile and publish latest version +# 5. Updates documentation to use the latest Layer ARN for all commercial regions # 6. Builds a new user guide and API docs with release version; update /latest pointing to newly released version # 7. Close all issues labeled "pending-release" and notify customers about the release # # === Manual activities === # -# 1. Edit the current draft release notes -# 2. If not already set, use `v` as a tag, e.g., v1.26.4, and select develop as target branch +# 1. Kick off this workflow with the intended version +# 2. Update draft release notes after this workflow completes +# 3. If not already set, use `v` as a tag, e.g., v1.26.4, and select develop as target branch # See MAINTAINERS.md "Releasing a new version" for release mechanisms @@ -47,8 +48,7 @@ on: required: false jobs: - release: - environment: release + build: runs-on: aws-lambda-powertools_ubuntu-latest_4-core permissions: contents: write @@ -90,21 +90,21 @@ jobs: id: versioning run: poetry version "${RELEASE_VERSION}" - name: Build python package and wheel - if: ${{ !inputs.skip_pypi }} run: poetry build - # March 1st: PyPi test is under maintenance.... 
- # - name: Upload to PyPi test - # if: ${{ !inputs.skip_pypi }} - # run: make release-test - # env: - # PYPI_USERNAME: __token__ - # PYPI_TEST_TOKEN: ${{ secrets.PYPI_TEST_TOKEN }} - - name: Upload to PyPi prod - if: ${{ !inputs.skip_pypi }} - run: make release-prod - env: - PYPI_USERNAME: __token__ - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + + - name: Cache release artifact + id: cache-release-build + uses: actions/cache/save@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 + with: + path: dist/ + # NOTE: cache key uses a hash of (Runner OS + Version to be released + Deps) + # since a new release might not change a dependency but version + # otherwise we might accidentally reuse a previously cached artifact for a newer release. + # The reason we don't add pyproject.toml here is to avoid racing conditions + # where git checkout might happen too fast and doesn't pick up the latest version + # and also future-proof for when we switch to protected branch and update via PR + key: ${{ runner.os }}-${{ env.RELEASE_VERSION }}-${{ hashFiles('**/poetry.lock') }} + - name: Update version in trunk if: steps.versioning.outcome == 'success' run: | @@ -115,6 +115,34 @@ jobs: git pull origin "${BRANCH}" # prevents concurrent branch update failing push git push origin HEAD:refs/heads/"${BRANCH}" + release: + needs: build + environment: release + runs-on: aws-lambda-powertools_ubuntu-latest_4-core + permissions: + id-token: write # OIDC for PyPi Trusted Publisher feature + env: + RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v3 + - name: Restore release artifact from cache + id: restore-release-build + uses: actions/cache/restore@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 + with: + path: dist/ + key: ${{ runner.os }}-${{ env.RELEASE_VERSION }}-${{ hashFiles('**/poetry.lock') }} + + - name: Upload to PyPi prod + if: ${{ !inputs.skip_pypi }} + uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 # v1.8.6 + 
+ # March 1st: PyPi test is under maintenance.... + # - name: Upload to PyPi test + # if: ${{ !inputs.skip_pypi }} + # uses: pypa/gh-action-pypi-publish@a56da0b891b3dc519c7ee3284aff1fad93cc8598 # v1.8.6 + # with: + # repository-url: https://test.pypi.org/legacy/ + changelog: needs: release permissions: @@ -124,7 +152,7 @@ jobs: # NOTE: Watch out for the depth limit of 4 nested workflow_calls. # publish_layer -> publish_v2_layer -> reusable_deploy_v2_layer_stack -> reusable_update_v2_layer_arn_docs publish_layer: - needs: release + needs: [build, release] secrets: inherit permissions: id-token: write @@ -132,11 +160,11 @@ jobs: pages: write uses: ./.github/workflows/publish_v2_layer.yml with: - latest_published_version: ${{ needs.release.outputs.RELEASE_VERSION }} + latest_published_version: ${{ needs.build.outputs.RELEASE_VERSION }} pre_release: ${{ inputs.pre_release }} post_release: - needs: [release, publish_layer] + needs: [build, release, publish_layer] permissions: contents: read issues: write @@ -144,7 +172,7 @@ jobs: pull-requests: write runs-on: ubuntu-latest env: - RELEASE_VERSION: ${{ needs.release.outputs.RELEASE_VERSION }} + RELEASE_VERSION: ${{ needs.build.outputs.RELEASE_VERSION }} steps: - uses: actions/checkout@v3 - name: Close issues related to this release diff --git a/CHANGELOG.md b/CHANGELOG.md index aaec15e4ed9..541e8f3e0ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,52 @@ ## Bug Fixes +* typo +* **ci:** pypi publishing was targetting test endpoint + +## Documentation + +* **batch:** fixed typo in DynamoDB Streams section ([#2189](https://github.com/awslabs/aws-lambda-powertools-python/issues/2189)) +* **examples:** standardize lambda handler function name ([#2192](https://github.com/awslabs/aws-lambda-powertools-python/issues/2192)) +* **homepage:** add customer references section ([#2159](https://github.com/awslabs/aws-lambda-powertools-python/issues/2159)) +* **jmespath:** fix MD037/no-space-in-emphasis +* **tutorial:** use 
newer sam cli template; update to py3.10 ([#2167](https://github.com/awslabs/aws-lambda-powertools-python/issues/2167)) +* **we-made-this:** add serverless transactional message app ([#2182](https://github.com/awslabs/aws-lambda-powertools-python/issues/2182)) + +## Features + +* **ci:** dispatch GitHub analytics action ([#2161](https://github.com/awslabs/aws-lambda-powertools-python/issues/2161)) +* **event_source:** support custom json_deserializer; add json_body in SQSEvent ([#2200](https://github.com/awslabs/aws-lambda-powertools-python/issues/2200)) +* **event_source:** add support for dynamic partitions in the Api Gateway Authorizer event ([#2176](https://github.com/awslabs/aws-lambda-powertools-python/issues/2176)) +* **event_sources:** Add __str__ to Data Classes base DictWrapper ([#2129](https://github.com/awslabs/aws-lambda-powertools-python/issues/2129)) +* **jmespath:** new built-in envelopes to unwrap S3 events ([#2169](https://github.com/awslabs/aws-lambda-powertools-python/issues/2169)) +* **logger:** add DatadogLogFormatter and observability provider ([#2183](https://github.com/awslabs/aws-lambda-powertools-python/issues/2183)) +* **metrics:** add flush_metrics() method to allow manual flushing of metrics ([#2171](https://github.com/awslabs/aws-lambda-powertools-python/issues/2171)) +* **parser:** add support for SQS-wrapped S3 event notifications ([#2108](https://github.com/awslabs/aws-lambda-powertools-python/issues/2108)) + +## Maintenance + +* add dummy reusable dispatch analytics job +* **ci:** use new pypi trusted publisher for increase security ([#2198](https://github.com/awslabs/aws-lambda-powertools-python/issues/2198)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.5 to 1.8.6 ([#2201](https://github.com/awslabs/aws-lambda-powertools-python/issues/2201)) +* **deps-dev:** bump mkdocs-material from 9.1.8 to 9.1.9 ([#2190](https://github.com/awslabs/aws-lambda-powertools-python/issues/2190)) +* **deps-dev:** bump types-requests from 
2.28.11.17 to 2.29.0.0 ([#2187](https://github.com/awslabs/aws-lambda-powertools-python/issues/2187)) +* **deps-dev:** bump coverage from 7.2.4 to 7.2.5 ([#2186](https://github.com/awslabs/aws-lambda-powertools-python/issues/2186)) +* **deps-dev:** bump coverage from 7.2.3 to 7.2.4 ([#2179](https://github.com/awslabs/aws-lambda-powertools-python/issues/2179)) +* **deps-dev:** bump importlib-metadata from 6.5.0 to 6.6.0 ([#2163](https://github.com/awslabs/aws-lambda-powertools-python/issues/2163)) +* **deps-dev:** bump mypy-boto3-xray from 1.26.11.post1 to 1.26.122 ([#2173](https://github.com/awslabs/aws-lambda-powertools-python/issues/2173)) +* **deps-dev:** bump aws-cdk from 2.76.0 to 2.77.0 ([#2174](https://github.com/awslabs/aws-lambda-powertools-python/issues/2174)) +* **deps-dev:** bump mypy-boto3-lambda from 1.26.115 to 1.26.122 ([#2172](https://github.com/awslabs/aws-lambda-powertools-python/issues/2172)) +* **deps-dev:** bump cfn-lint from 0.77.2 to 0.77.3 ([#2165](https://github.com/awslabs/aws-lambda-powertools-python/issues/2165)) +* **deps-dev:** bump mkdocs-material from 9.1.6 to 9.1.8 ([#2162](https://github.com/awslabs/aws-lambda-powertools-python/issues/2162)) +* **deps-dev:** bump cfn-lint from 0.77.3 to 0.77.4 ([#2178](https://github.com/awslabs/aws-lambda-powertools-python/issues/2178)) +* **governance:** add Lambda Powertools for .NET in issue templates ([#2196](https://github.com/awslabs/aws-lambda-powertools-python/issues/2196)) + + + +## [v2.14.1] - 2023-04-21 +## Bug Fixes + * **batch:** resolve use of ValidationError in batch ([#2157](https://github.com/awslabs/aws-lambda-powertools-python/issues/2157)) * **e2e:** fix test brittleness ([#2152](https://github.com/awslabs/aws-lambda-powertools-python/issues/2152)) @@ -19,14 +65,15 @@ ## Maintenance +* update v2 layer ARN on documentation * add Python 3.10 PyPi language classifier ([#2144](https://github.com/awslabs/aws-lambda-powertools-python/issues/2144)) * update v2 layer ARN on 
documentation * **batch:** safeguard custom use of BatchProcessingError exception ([#2155](https://github.com/awslabs/aws-lambda-powertools-python/issues/2155)) * **deps:** bump codecov/codecov-action from 3.1.2 to 3.1.3 ([#2153](https://github.com/awslabs/aws-lambda-powertools-python/issues/2153)) * **deps:** bump dependabot/fetch-metadata from 1.3.6 to 1.4.0 ([#2140](https://github.com/awslabs/aws-lambda-powertools-python/issues/2140)) * **deps-dev:** bump aws-cdk from 2.75.0 to 2.75.1 ([#2150](https://github.com/awslabs/aws-lambda-powertools-python/issues/2150)) -* **deps-dev:** bump mypy-boto3-secretsmanager from 1.26.89 to 1.26.116 ([#2147](https://github.com/awslabs/aws-lambda-powertools-python/issues/2147)) * **deps-dev:** bump aws-cdk from 2.75.1 to 2.76.0 ([#2154](https://github.com/awslabs/aws-lambda-powertools-python/issues/2154)) +* **deps-dev:** bump mypy-boto3-secretsmanager from 1.26.89 to 1.26.116 ([#2147](https://github.com/awslabs/aws-lambda-powertools-python/issues/2147)) * **deps-dev:** bump importlib-metadata from 6.4.1 to 6.5.0 ([#2141](https://github.com/awslabs/aws-lambda-powertools-python/issues/2141)) * **deps-dev:** bump mypy-boto3-s3 from 1.26.104 to 1.26.116 ([#2149](https://github.com/awslabs/aws-lambda-powertools-python/issues/2149)) * **deps-dev:** bump filelock from 3.11.0 to 3.12.0 ([#2142](https://github.com/awslabs/aws-lambda-powertools-python/issues/2142)) @@ -3164,7 +3211,8 @@ * Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.0...HEAD +[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.1...HEAD +[v2.14.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.14.0...v2.14.1 [v2.14.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.13.0...v2.14.0 [v2.13.0]: 
https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.12.0...v2.13.0 [v2.12.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.11.0...v2.12.0 diff --git a/MAINTAINERS.md b/MAINTAINERS.md index a82c160a58d..3525147f68f 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -191,7 +191,7 @@ Reword through rebase and push with `--force-with-lease` once you're confident. **Looks good, what's next?** -The only step is to draft and publish a good release notes, everything else is automated. +Kickoff the `Release` workflow with the intended version. Once complete, update the draft release notes within the `` section summarizing why customers should care about this release. #### Drafting release notes diff --git a/README.md b/README.md index b79f43a85f3..310c267af6e 100644 --- a/README.md +++ b/README.md @@ -45,6 +45,31 @@ With [pip](https://pip.pypa.io/en/latest/index.html) installed, run: ``pip insta * [Serverless E-commerce platform](https://github.com/aws-samples/aws-serverless-ecommerce-platform) * [Serverless GraphQL Nanny Booking Api](https://github.com/trey-rosius/babysitter_api) +## How to support AWS Lambda Powertools for Python? + +### Becoming a reference customer + +Knowing which companies are using this library is important to help prioritize the project internally. If your company is using AWS Lambda Powertools for Python, you can request to have your name and logo added to the README file by raising a [Support Lambda Powertools (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E) issue. 
+ +The following companies, among others, use Powertools: + +* [CPQi (Exadel Financial Services)](https://cpqi.com/) +* [CloudZero](https://www.cloudzero.com/) +* [CyberArk](https://www.cyberark.com/) +* [globaldatanet](https://globaldatanet.com/) +* [IMS](https://ims.tech/) +* [Propellor.ai](https://www.propellor.ai/) +* [TopSport](https://www.topsport.com.au/) +* [Trek10](https://www.trek10.com/) + +### Sharing your work + +Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). + +### Using Lambda Layer or SAR + +This helps us understand who uses Powertools in a non-intrusive way, and helps us gain future investments for other Powertools languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. 
+ ## Credits * Structured logging initial implementation from [aws-lambda-logging](https://gitlab.com/hadrien/aws_lambda_logging) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 93e91e0dc8d..db80876c798 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -139,11 +139,11 @@ def __init__( if self.utc: self.converter = time.gmtime - super(LambdaPowertoolsFormatter, self).__init__(datefmt=self.datefmt) - self.keys_combined = {**self._build_default_keys(), **kwargs} self.log_format.update(**self.keys_combined) + super().__init__(datefmt=self.datefmt) + def serialize(self, log: Dict) -> str: """Serialize structured log dict to JSON str""" return self.json_serializer(log) diff --git a/aws_lambda_powertools/logging/formatters/__init__.py b/aws_lambda_powertools/logging/formatters/__init__.py new file mode 100644 index 00000000000..b6974414f4c --- /dev/null +++ b/aws_lambda_powertools/logging/formatters/__init__.py @@ -0,0 +1,5 @@ +"""Built-in Logger formatters for Observability Providers that require custom config.""" + +# NOTE: we don't expose formatters directly (barrel import) +# as we cannot know if they'll need additional dependencies in the future +# so we isolate to avoid a performance hit and workarounds like lazy imports diff --git a/aws_lambda_powertools/logging/formatters/datadog.py b/aws_lambda_powertools/logging/formatters/datadog.py new file mode 100644 index 00000000000..fa92bf74598 --- /dev/null +++ b/aws_lambda_powertools/logging/formatters/datadog.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from typing import Any, Callable + +from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter + + +class DatadogLogFormatter(LambdaPowertoolsFormatter): + def __init__( + self, + json_serializer: Callable[[dict], str] | None = None, + json_deserializer: Callable[[dict | str | bool | int | float], str] | None = None, + 
json_default: Callable[[Any], Any] | None = None, + datefmt: str | None = None, + use_datetime_directive: bool = False, + log_record_order: list[str] | None = None, + utc: bool = False, + use_rfc3339: bool = True, # NOTE: The only change from our base formatter + **kwargs, + ): + """Datadog formatter to comply with Datadog log parsing + + Changes compared to the default Logger Formatter: + + - timestamp format to use RFC3339 e.g., "2023-05-01T15:34:26.841+0200" + + + Parameters + ---------- + log_record_order : list[str] | None, optional + _description_, by default None + + Parameters + ---------- + json_serializer : Callable, optional + function to serialize `obj` to a JSON formatted `str`, by default json.dumps + json_deserializer : Callable, optional + function to deserialize `str`, `bytes`, bytearray` containing a JSON document to a Python `obj`, + by default json.loads + json_default : Callable, optional + function to coerce unserializable values, by default str + + Only used when no custom JSON encoder is set + + datefmt : str, optional + String directives (strftime) to format log timestamp. + + See https://docs.python.org/3/library/time.html#time.strftime or + use_datetime_directive: str, optional + Interpret `datefmt` as a format string for `datetime.datetime.strftime`, rather than + `time.strftime` - Only useful when used alongside `datefmt`. + + See https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior . This + also supports a custom %F directive for milliseconds. + + log_record_order : list, optional + set order of log keys when logging, by default ["level", "location", "message", "timestamp"] + + utc : bool, optional + set logging timestamp to UTC, by default False to continue to use local time as per stdlib + use_rfc3339: bool, optional + Whether to use a popular dateformat that complies with both RFC3339 and ISO8601. + e.g., 2022-10-27T16:27:43.738+02:00. 
+ kwargs + Key-value to persist in all log messages + """ + super().__init__( + json_serializer=json_serializer, + json_deserializer=json_deserializer, + json_default=json_default, + datefmt=datefmt, + use_datetime_directive=use_datetime_directive, + log_record_order=log_record_order, + utc=utc, + use_rfc3339=use_rfc3339, + **kwargs, + ) diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index b96356192ab..59daafa0bb1 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -328,6 +328,28 @@ def clear_metrics(self) -> None: self.dimension_set.clear() self.metadata_set.clear() + def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None: + """Manually flushes the metrics. This is normally not necessary, + unless you're running on other runtimes besides Lambda, where the @log_metrics + decorator already handles things for you. + + Parameters + ---------- + raise_on_empty_metrics : bool, optional + raise exception if no metrics are emitted, by default False + """ + if not raise_on_empty_metrics and not self.metric_set: + warnings.warn( + "No application metrics to publish. The cold-start metric may be published if enabled. " + "If application metrics should never be empty, consider using 'raise_on_empty_metrics'", + stacklevel=2, + ) + else: + logger.debug("Flushing existing metrics") + metrics = self.serialize_metric_set() + print(json.dumps(metrics, separators=(",", ":"))) + self.clear_metrics() + def log_metrics( self, lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None, @@ -390,16 +412,7 @@ def decorate(event, context): if capture_cold_start_metric: self._add_cold_start_metric(context=context) finally: - if not raise_on_empty_metrics and not self.metric_set: - warnings.warn( - "No application metrics to publish. The cold-start metric may be published if enabled. 
" - "If application metrics should never be empty, consider using 'raise_on_empty_metrics'", - stacklevel=2, - ) - else: - metrics = self.serialize_metric_set() - self.clear_metrics() - print(json.dumps(metrics, separators=(",", ":"))) + self.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics) return response diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py index 51f8f74b56a..431d678e9b6 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py @@ -21,8 +21,9 @@ def __init__( stage: str, http_method: str, resource: str, + partition: str = "aws", ): - self.partition = "aws" + self.partition = partition self.region = region self.aws_account_id = aws_account_id self.api_id = api_id @@ -55,6 +56,7 @@ def parse_api_gateway_arn(arn: str) -> APIGatewayRouteArn: arn_parts = arn.split(":") api_gateway_arn_parts = arn_parts[5].split("/") return APIGatewayRouteArn( + partition=arn_parts[1], region=arn_parts[3], aws_account_id=arn_parts[4], api_id=api_gateway_arn_parts[0], @@ -369,6 +371,7 @@ def __init__( stage: str, context: Optional[Dict] = None, usage_identifier_key: Optional[str] = None, + partition: str = "aws", ): """ Parameters @@ -401,6 +404,9 @@ def __init__( If the API uses a usage plan (the apiKeySource is set to `AUTHORIZER`), the Lambda authorizer function must return one of the usage plan's API keys as the usageIdentifierKey property value. > **Note:** This only applies for REST APIs. + partition: str, optional + Optional, arn partition. 
+ See https://docs.aws.amazon.com/IAM/latest/UserGuide/reference-arns.html """ self.principal_id = principal_id self.region = region @@ -412,6 +418,7 @@ def __init__( self._allow_routes: List[Dict] = [] self._deny_routes: List[Dict] = [] self._resource_pattern = re.compile(self.path_regex) + self.partition = partition @staticmethod def from_route_arn( @@ -443,7 +450,7 @@ def _add_route(self, effect: str, http_method: str, resource: str, conditions: O raise ValueError(f"Invalid resource path: {resource}. Path should match {self.path_regex}") resource_arn = APIGatewayRouteArn( - self.region, self.aws_account_id, self.api_id, self.stage, http_method, resource + self.region, self.aws_account_id, self.api_id, self.stage, http_method, resource, self.partition ).arn route = {"resourceArn": resource_arn, "conditions": conditions} diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py index a4139ebbe68..c502aacb090 100644 --- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py +++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py @@ -80,6 +80,8 @@ def location(self) -> CodePipelineLocation: class CodePipelineArtifactCredentials(DictWrapper): + _sensitive_properties = ["secret_access_key", "session_token"] + @property def access_key_id(self) -> str: return self["accessKeyId"] diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index fa0d479af8a..d1ce8f90a07 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -1,7 +1,7 @@ import base64 import json from collections.abc import Mapping -from typing import Any, Dict, Iterator, Optional +from typing import Any, Callable, Dict, Iterator, List, Optional from aws_lambda_powertools.shared.headers_serializer import BaseHeadersSerializer 
@@ -9,9 +9,19 @@ class DictWrapper(Mapping): """Provides a single read only access to a wrapper dict""" - def __init__(self, data: Dict[str, Any]): + def __init__(self, data: Dict[str, Any], json_deserializer: Optional[Callable] = None): + """ + Parameters + ---------- + data : Dict[str, Any] + Lambda Event Source Event payload + json_deserializer : Callable, optional + function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python `obj`, + by default json.loads + """ self._data = data self._json_data: Optional[Any] = None + self._json_deserializer = json_deserializer or json.loads def __getitem__(self, key: str) -> Any: return self._data[key] @@ -28,6 +38,49 @@ def __iter__(self) -> Iterator: def __len__(self) -> int: return len(self._data) + def __str__(self) -> str: + return str(self._str_helper()) + + def _str_helper(self) -> Dict[str, Any]: + """ + Recursively get a Dictionary of DictWrapper properties primarily + for use by __str__ for debugging purposes. + + Will remove "raw_event" properties, and any defined by the Data Class + `_sensitive_properties` list field. + This should be used in case where secrets, such as access keys, are + stored in the Data Class but should not be logged out. + """ + properties = self._properties() + sensitive_properties = ["raw_event"] + if hasattr(self, "_sensitive_properties"): + sensitive_properties.extend(self._sensitive_properties) # pyright: ignore + + result: Dict[str, Any] = {} + for property_key in properties: + if property_key in sensitive_properties: + result[property_key] = "[SENSITIVE]" + else: + try: + property_value = getattr(self, property_key) + result[property_key] = property_value + + # Checks whether the class is a subclass of the parent class to perform a recursive operation.
+ if issubclass(property_value.__class__, DictWrapper): + result[property_key] = property_value._str_helper() + # Checks if the key is a list and if it is a subclass of the parent class + elif isinstance(property_value, list): + for seq, item in enumerate(property_value): + if issubclass(item.__class__, DictWrapper): + result[property_key][seq] = item._str_helper() + except Exception: + result[property_key] = "[Cannot be deserialized]" + + return result + + def _properties(self) -> List[str]: + return [p for p in dir(self.__class__) if isinstance(getattr(self.__class__, p), property)] + def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]: return self._data.get(key, default) @@ -79,7 +132,7 @@ def body(self) -> Optional[str]: def json_body(self) -> Any: """Parses the submitted body as json""" if self._json_data is None: - self._json_data = json.loads(self.decoded_body) + self._json_data = self._json_deserializer(self.decoded_body) return self._json_data @property diff --git a/aws_lambda_powertools/utilities/data_classes/kafka_event.py b/aws_lambda_powertools/utilities/data_classes/kafka_event.py index e52cc5d8dc1..4773d9e50de 100644 --- a/aws_lambda_powertools/utilities/data_classes/kafka_event.py +++ b/aws_lambda_powertools/utilities/data_classes/kafka_event.py @@ -1,5 +1,4 @@ import base64 -import json from typing import Any, Dict, Iterator, List, Optional from aws_lambda_powertools.utilities.data_classes.common import DictWrapper @@ -55,7 +54,7 @@ def decoded_value(self) -> bytes: def json_value(self) -> Any: """Decodes the text encoded data as JSON.""" if self._json_data is None: - self._json_data = json.loads(self.decoded_value.decode("utf-8")) + self._json_data = self._json_deserializer(self.decoded_value.decode("utf-8")) return self._json_data @property @@ -117,7 +116,7 @@ def records(self) -> Iterator[KafkaEventRecord]: """The Kafka records.""" for chunk in self["records"].values(): for record in chunk: - yield KafkaEventRecord(record) + 
yield KafkaEventRecord(data=record, json_deserializer=self._json_deserializer) @property def record(self) -> KafkaEventRecord: diff --git a/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py index 5683902f9d0..47dc196856d 100644 --- a/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py +++ b/aws_lambda_powertools/utilities/data_classes/kinesis_firehose_event.py @@ -1,5 +1,4 @@ import base64 -import json from typing import Iterator, Optional from aws_lambda_powertools.utilities.data_classes.common import DictWrapper @@ -75,7 +74,7 @@ def data_as_text(self) -> str: def data_as_json(self) -> dict: """Decoded base64-encoded data loaded to json""" if self._json_data is None: - self._json_data = json.loads(self.data_as_text) + self._json_data = self._json_deserializer(self.data_as_text) return self._json_data @@ -110,4 +109,4 @@ def region(self) -> str: @property def records(self) -> Iterator[KinesisFirehoseRecord]: for record in self["records"]: - yield KinesisFirehoseRecord(record) + yield KinesisFirehoseRecord(data=record, json_deserializer=self._json_deserializer) diff --git a/aws_lambda_powertools/utilities/data_classes/sqs_event.py b/aws_lambda_powertools/utilities/data_classes/sqs_event.py index 7d0dbe49352..2b3224358d8 100644 --- a/aws_lambda_powertools/utilities/data_classes/sqs_event.py +++ b/aws_lambda_powertools/utilities/data_classes/sqs_event.py @@ -1,4 +1,4 @@ -from typing import Dict, Iterator, Optional +from typing import Any, Dict, Iterator, Optional from aws_lambda_powertools.utilities.data_classes.common import DictWrapper @@ -103,6 +103,35 @@ def body(self) -> str: """The message's contents (not URL-encoded).""" return self["body"] + @property + def json_body(self) -> Any: + """Deserializes JSON string available in 'body' property + + Notes + ----- + + **Strict typing** + + Caller controls the type as we can't use recursive generics here. 
+ + JSON Union types would force caller to have to cast a type. Instead, + we choose Any to ease ergonomics and other tools receiving this data. + + Examples + -------- + + **Type deserialized data from JSON string** + + ```python + data: dict = record.json_body # {"telemetry": [], ...} + # or + data: list = record.json_body # ["telemetry_values"] + ``` + """ + if self._json_data is None: + self._json_data = self._json_deserializer(self["body"]) + return self._json_data + @property def attributes(self) -> SQSRecordAttributes: """A map of the attributes requested in ReceiveMessage to their respective values.""" @@ -157,4 +186,4 @@ class SQSEvent(DictWrapper): @property def records(self) -> Iterator[SQSRecord]: for record in self["Records"]: - yield SQSRecord(record) + yield SQSRecord(data=record, json_deserializer=self._json_deserializer) diff --git a/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py b/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py index df50e5f98d4..f4aecb24bae 100644 --- a/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py +++ b/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py @@ -6,3 +6,8 @@ CLOUDWATCH_EVENTS_SCHEDULED = EVENTBRIDGE KINESIS_DATA_STREAM = "Records[*].kinesis.powertools_json(powertools_base64(data))" CLOUDWATCH_LOGS = "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]" +S3_SNS_SQS = "Records[*].powertools_json(body).powertools_json(Message).Records[0]" +S3_SQS = "Records[*].powertools_json(body).Records[0]" +S3_SNS_KINESIS_FIREHOSE = "records[*].powertools_json(powertools_base64(data)).powertools_json(Message).Records[0]" +S3_KINESIS_FIREHOSE = "records[*].powertools_json(powertools_base64(data)).Records[0]" +S3_EVENTBRIDGE_SQS = "Records[*].powertools_json(body).detail" diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 757b9c4fff5..5f7a8a6b550 100644 --- 
a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -50,6 +50,10 @@ S3Model, S3RecordModel, ) +from .s3_event_notification import ( + S3SqsEventNotificationModel, + S3SqsEventNotificationRecordModel, +) from .s3_object_event import ( S3ObjectConfiguration, S3ObjectContext, @@ -130,6 +134,8 @@ "SqsRecordModel", "SqsMsgAttributeModel", "SqsAttributesModel", + "S3SqsEventNotificationModel", + "S3SqsEventNotificationRecordModel", "APIGatewayProxyEventModel", "APIGatewayEventRequestContext", "APIGatewayEventAuthorizer", diff --git a/aws_lambda_powertools/utilities/parser/models/s3_event_notification.py b/aws_lambda_powertools/utilities/parser/models/s3_event_notification.py new file mode 100644 index 00000000000..1bcbc83ac18 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/s3_event_notification.py @@ -0,0 +1,14 @@ +from typing import List + +from pydantic import Json + +from aws_lambda_powertools.utilities.parser.models.s3 import S3Model +from aws_lambda_powertools.utilities.parser.models.sqs import SqsModel, SqsRecordModel + + +class S3SqsEventNotificationRecordModel(SqsRecordModel): + body: Json[S3Model] + + +class S3SqsEventNotificationModel(SqsModel): + Records: List[S3SqsEventNotificationRecordModel] diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index c92a8361b7c..a1c172c20fc 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, List, Optional, Type, Union +from typing import Dict, List, Optional, Sequence, Type, Union from pydantic import BaseModel @@ -63,4 +63,4 @@ class SqsRecordModel(BaseModel): class SqsModel(BaseModel): - Records: List[SqsRecordModel] + Records: Sequence[SqsRecordModel] diff --git a/docs/core/logger.md b/docs/core/logger.md index 
8bccdafeec3..2f0472368c3 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -445,6 +445,26 @@ If you prefer configuring it separately, or you'd want to bring this JSON Format --8<-- "examples/logger/src/powertools_formatter_setup.py" ``` +### Observability providers + +!!! note "In this context, an observability provider is an [AWS Lambda Partner](https://go.aws/3HtU6CZ){target="_blank"} offering a platform for logging, metrics, traces, etc." + +You can send logs to the observability provider of your choice via [Lambda Extensions](https://aws.amazon.com/blogs/compute/using-aws-lambda-extensions-to-send-logs-to-custom-destinations/){target="_blank"}. In most cases, you shouldn't need any custom Logger configuration, and logs will be shipped async without any performance impact. + +#### Built-in formatters + +In rare circumstances where JSON logs are not parsed correctly by your provider, we offer built-in formatters to make this transition easier. + +| Provider | Formatter | Notes | +| -------- | --------------------- | ---------------------------------------------------- | +| Datadog | `DatadogLogFormatter` | Modifies default timestamp to use RFC3339 by default | + +You can use import and use them as any other Logger formatter via `logger_formatter` parameter: + +```python hl_lines="2 4" title="Using built-in Logger Formatters" +--8<-- "examples/logger/src/observability_provider_builtin_formatters.py" +``` + ### Migrating from other Loggers If you're migrating from other Loggers, there are few key points to be aware of: [Service parameter](#the-service-parameter), [Inheriting Loggers](#inheriting-loggers), [Overriding Log records](#overriding-log-records), and [Logging exceptions](#logging-exceptions). 
diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 81acd8999d8..ba9f746e867 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -251,13 +251,15 @@ By default it will skip all previously defined dimensions including default dime ### Flushing metrics manually -If you prefer not to use `log_metrics` because you might want to encapsulate additional logic when doing so, you can manually flush and clear metrics as follows: +If you are using the AWS Lambda Web Adapter project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data. ???+ warning - Metrics, dimensions and namespace validation still applies + This does not capture Cold Start metrics, and metric data validation still applies. -```python hl_lines="11-14" title="Manually flushing and clearing metrics from memory" ---8<-- "examples/metrics/src/single_metric.py" +Contrary to the `log_metrics` decorator, you are now also responsible to flush metrics in the event of an exception. + +```python hl_lines="18" title="Manually flushing and clearing metrics from memory" +--8<-- "examples/metrics/src/flush_metrics.py" ``` ### Metrics isolation diff --git a/docs/index.md b/docs/index.md index 91a15dc092e..2aafa480f07 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,7 +14,7 @@ Powertools is a developer toolkit to implement Serverless best practices and inc You can choose to support us in three ways: - 1) [**Become a reference customers**](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E). This gives us permission to list your company in our documentation. 
+ 1) [**Become a reference customer**](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E). This gives us permission to list your company in our documentation. 2) [**Share your work**](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=community-content&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E). Blog posts, video, sample projects you used Powertools! @@ -26,8 +26,8 @@ Powertools is a developer toolkit to implement Serverless best practices and inc You can install Powertools using one of the following options: -* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31**](#){: .copyMe}:clipboard: -* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31**](#){: .copyMe}:clipboard: +* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32**](#){: .copyMe}:clipboard: +* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32**](#){: .copyMe}:clipboard: * **Pip**: **[`pip install "aws-lambda-powertools"`](#){: .copyMe}:clipboard:** ??? question "Using Pip? You might need to install additional dependencies." @@ -78,60 +78,60 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
| Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-north-1` | 
[arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:31](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | 
[arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: 
.copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:32](#){: .copyMe}:clipboard: | === "arm64" | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-southeast-1` 
| [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | - | `us-west-2` | 
[arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-west-2` | 
[arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32](#){: .copyMe}:clipboard: | ??? note "Note: Click to expand and copy code snippets for popular frameworks" @@ -144,7 +144,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 ``` === "Serverless framework" @@ -154,7 +154,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 ``` === "CDK" @@ -170,7 +170,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -219,7 +219,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -272,7 +272,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 ❯ amplify push -y @@ -283,7 +283,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 ? Do you want to edit the local lambda function now? No ``` @@ -297,7 +297,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. 
Properties: Architectures: [arm64] Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32 ``` === "Serverless framework" @@ -308,7 +308,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. handler: lambda_function.lambda_handler architecture: arm64 layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32 ``` === "CDK" @@ -324,7 +324,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -374,7 +374,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32"] architectures = ["arm64"] source_code_hash = filebase64sha256("lambda_function_payload.zip") @@ -430,7 +430,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31 + ? 
Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32 ❯ amplify push -y @@ -441,7 +441,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:31 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:32 ? Do you want to edit the local lambda function now? No ``` @@ -449,7 +449,7 @@ You can include Powertools Lambda Layer using [AWS Lambda Console](https://docs. Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. @@ -670,7 +670,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch ## Quick getting started ```bash title="Hello world example using SAM CLI" -sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python +sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing ``` ## Features @@ -737,6 +737,31 @@ As a best practice for libraries, Powertools module logging statements are suppr When necessary, you can use `POWERTOOLS_DEBUG` environment variable to enable debugging. This will provide additional information on every internal operation. +## How to support AWS Lambda Powertools for Python? 
+ +### Becoming a reference customer + +Knowing which companies are using this library is important to help prioritize the project internally. If your company is using AWS Lambda Powertools for Python, you can request to have your name and logo added to the README file by raising a [Support Lambda Powertools (become a reference)](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E){target="_blank"} issue. + +The following companies, among others, use Powertools: + +* [CPQi (Exadel Financial Services)](https://cpqi.com/){target="_blank"} +* [CloudZero](https://www.cloudzero.com/){target="_blank"} +* [CyberArk](https://www.cyberark.com/){target="_blank"} +* [globaldatanet](https://globaldatanet.com/){target="_blank"} +* [IMS](https://ims.tech/){target="_blank"} +* [Propellor.ai](https://www.propellor.ai/){target="_blank"} +* [TopSport](https://www.topsport.com.au/){target="_blank"} +* [Trek10](https://www.trek10.com/){target="_blank"} + +### Sharing your work + +Share what you did with Powertools 💞💞. Blog post, workshops, presentation, sample apps and others. Check out what the community has already shared about Powertools [here](https://awslabs.github.io/aws-lambda-powertools-python/latest/we_made_this/). + +### Using Lambda Layer or SAR + +This helps us understand who uses Powertools in a non-intrusive way, and helps us gain future investments for other Powertools languages. When [using Layers](https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer), you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process. + ## Tenets These are our core principles to guide our decision making. 
diff --git a/docs/media/logos/cloudzero-logo.png b/docs/media/logos/cloudzero-logo.png new file mode 100644 index 00000000000..08326a8aaa9 Binary files /dev/null and b/docs/media/logos/cloudzero-logo.png differ diff --git a/docs/media/logos/cpqi.png b/docs/media/logos/cpqi.png new file mode 100644 index 00000000000..4815fd8fe70 Binary files /dev/null and b/docs/media/logos/cpqi.png differ diff --git a/docs/media/logos/cyberark-logo-dark.svg b/docs/media/logos/cyberark-logo-dark.svg new file mode 100644 index 00000000000..982087c5ca3 --- /dev/null +++ b/docs/media/logos/cyberark-logo-dark.svg @@ -0,0 +1,56 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/media/logos/globaldatanet.png b/docs/media/logos/globaldatanet.png new file mode 100644 index 00000000000..d65d33ee35e Binary files /dev/null and b/docs/media/logos/globaldatanet.png differ diff --git a/docs/media/logos/ims-logo.png b/docs/media/logos/ims-logo.png new file mode 100644 index 00000000000..d8ccd148a67 Binary files /dev/null and b/docs/media/logos/ims-logo.png differ diff --git a/docs/media/logos/propellor-ai.png b/docs/media/logos/propellor-ai.png new file mode 100644 index 00000000000..80df0514092 Binary files /dev/null and b/docs/media/logos/propellor-ai.png differ diff --git a/docs/media/logos/topsport.svg b/docs/media/logos/topsport.svg new file mode 100644 index 00000000000..8e35c7caba7 --- /dev/null +++ b/docs/media/logos/topsport.svg @@ -0,0 +1,68 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + diff --git a/docs/media/logos/trek10-logo.jpeg b/docs/media/logos/trek10-logo.jpeg new file mode 100644 index 00000000000..98bab568f64 Binary files /dev/null and b/docs/media/logos/trek10-logo.jpeg differ diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md index 8ae49544419..9965d70d267 100644 --- a/docs/tutorial/index.md +++ b/docs/tutorial/index.md @@ -17,10 +17,14 @@ This tutorial progressively introduces Lambda Powertools core utilities by using 
Let's clone our sample project before we add one feature at a time. ???+ tip "Tip: Want to skip to the final project?" - Bootstrap directly via SAM CLI: `sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python` + Bootstrap directly via SAM CLI: + + ```shell + sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing + ``` ```bash title="Use SAM CLI to initialize the sample project" -sam init --runtime python3.9 --dependency-manager pip --app-template hello-world --name powertools-quickstart +sam init --runtime python3.10 --dependency-manager pip --app-template hello-world --name powertools-quickstart ``` ### Project structure diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index 296ce4f02ac..c4d7dc26e6c 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -508,7 +508,7 @@ Processing batches from Kinesis works in three stages: ### Processing messages from DynamoDB -Processing batches from Kinesis works in three stages: +Processing batches from DynamoDB Streams works in three stages: 1. Instantiate **`BatchProcessor`** and choose **`EventType.DynamoDBStreams`** for the event type 2.
Define your function to handle each batch record, and use [`DynamoDBRecord`](data_classes.md#dynamodb-streams){target="_blank"} type annotation for autocompletion diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 169133788ad..04779ccf0f5 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -52,6 +52,22 @@ Same example as above, but using the `event_source` decorator if 'helloworld' in event.path and event.http_method == 'GET': do_something_with(event.body, user) ``` + +Log Data Event for Troubleshooting + +=== "app.py" + + ```python hl_lines="4 8" + from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent + from aws_lambda_powertools.logging.logger import Logger + + logger = Logger(service="hello_logs", level="DEBUG") + + @event_source(data_class=APIGatewayProxyEvent) + def lambda_handler(event: APIGatewayProxyEvent, context): + logger.debug(event) + ``` + **Autocomplete with self-documented properties and methods** ![Utilities Data Classes](../media/utilities_data_classes.png) @@ -1104,3 +1120,28 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda for record in event.records: do_something_with(record.body) ``` + +## Advanced + +### Debugging + +Alternatively, you can print out the fields to obtain more information. All classes come with a `__str__` method that generates a dictionary string which can be quite useful for debugging. + +However, certain events may contain sensitive fields such as `secret_access_key` and `session_token`, which are labeled as `[SENSITIVE]` to prevent any accidental disclosure of confidential information. + +!!! 
warning "If we fail to deserialize a field value (e.g., JSON), they will appear as `[Cannot be deserialized]`" + +=== "debugging.py" + ```python hl_lines="9" + --8<-- "examples/event_sources/src/debugging.py" + ``` + +=== "debugging_event.json" + ```json hl_lines="28 29" + --8<-- "examples/event_sources/src/debugging_event.json" + ``` +=== "debugging_output.json" + ```json hl_lines="16 17 18" + --8<-- "examples/event_sources/src/debugging_output.json" + ``` + ``` diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index a01a72ced16..e86fb824faf 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -64,16 +64,24 @@ We provide built-in envelopes for popular AWS Lambda event sources to easily dec These are all built-in envelopes you can use along with their expression as a reference: -| Envelope | JMESPath expression | -| --------------------------------- | ------------------------------------------------------------- | -| **`API_GATEWAY_REST`** | `powertools_json(body)` | -| **`API_GATEWAY_HTTP`** | `API_GATEWAY_REST` | -| **`SQS`** | `Records[*].powertools_json(body)` | -| **`SNS`** | `Records[0].Sns.Message | powertools_json(@)` | -| **`EVENTBRIDGE`** | `detail` | -| **`CLOUDWATCH_EVENTS_SCHEDULED`** | `EVENTBRIDGE` | -| **`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` | -| **`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` | +| Envelope | JMESPath expression | +| --------------------------------- | ----------------------------------------------------------------------------------------- | +| **`API_GATEWAY_HTTP`** | `powertools_json(body)` | +| **`API_GATEWAY_REST`** | `powertools_json(body)` | +| **`CLOUDWATCH_EVENTS_SCHEDULED`** | `detail` | +| **`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` | +| **`EVENTBRIDGE`** | `detail` | +| 
**`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` | +| **`S3_EVENTBRIDGE_SQS`** | `Records[*].powertools_json(body).detail` | +| **`S3_KINESIS_FIREHOSE`** | `records[*].powertools_json(powertools_base64(data)).Records[0]` | +| **`S3_SNS_KINESIS_FIREHOSE`** | `records[*].powertools_json(powertools_base64(data)).powertools_json(Message).Records[0]` | +| **`S3_SNS_SQS`** | `Records[*].powertools_json(body).powertools_json(Message).Records[0]` | +| **`S3_SQS`** | `Records[*].powertools_json(body).Records[0]` | +| **`SNS`** | `Records[0].Sns.Message | powertools_json(@)` | +| **`SQS`** | `Records[*].powertools_json(body)` | + +???+ tip "Using SNS?" + If you don't require SNS metadata, enable [raw message delivery](https://docs.aws.amazon.com/sns/latest/dg/sns-large-payload-raw-message-delivery.html){target="_blank"}. It will reduce multiple payload layers and size, when using SNS in combination with other services (_e.g., SQS, S3, etc_). ## Advanced diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 66103ad474b..38e12c0792d 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -156,25 +156,26 @@ def my_function(): Parser comes with the following built-in models: -| Model name | Description | -| --------------------------------------- | ---------------------------------------------------------------------------- | -| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams | -| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge | -| **SqsModel** | Lambda Event Source payload for Amazon SQS | -| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer | -| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs | -| **S3Model** | Lambda Event Source payload for Amazon S3 | -| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda | -| **S3EventNotificationEventBridgeModel** 
| Lambda Event Source payload for Amazon S3 Event Notification to EventBridge. | -| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams | -| **KinesisFirehoseModel** | Lambda Event Source payload for Amazon Kinesis Firehose | -| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | -| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | -| **APIGatewayProxyEventModel** | Lambda Event Source payload for Amazon API Gateway | -| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload | -| **LambdaFunctionUrlModel** | Lambda Event Source payload for Lambda Function URL payload | -| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload | -| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload | +| Model name | Description | +| --------------------------------------- | ------------------------------------------------------------------------------------- | +| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer | +| **APIGatewayProxyEventModel** | Lambda Event Source payload for Amazon API Gateway | +| **APIGatewayProxyEventV2Model** | Lambda Event Source payload for Amazon API Gateway v2 payload | +| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs | +| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams | +| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge | +| **KafkaMskEventModel** | Lambda Event Source payload for AWS MSK payload | +| **KafkaSelfManagedEventModel** | Lambda Event Source payload for self managed Kafka payload | +| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams | +| **KinesisFirehoseModel** | Lambda Event Source payload for Amazon Kinesis Firehose | +| **LambdaFunctionUrlModel** | Lambda Event Source payload for 
Lambda Function URL payload | +| **S3EventNotificationEventBridgeModel** | Lambda Event Source payload for Amazon S3 Event Notification to EventBridge. | +| **S3Model** | Lambda Event Source payload for Amazon S3 | +| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda | +| **S3SqsEventNotificationModel** | Lambda Event Source payload for S3 event notifications wrapped in SQS event (S3->SQS) | +| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | +| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | +| **SqsModel** | Lambda Event Source payload for Amazon SQS | #### Extending built-in models diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index ca5907a5fe1..277a1f91f81 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -141,16 +141,16 @@ We provide built-in envelopes to easily extract the payload from popular event s Here is a handy table with built-in envelopes along with their JMESPath expressions in case you want to build your own. 
-| Envelope name | JMESPath expression | -| ------------------------------- | ------------------------------------------------------------- | -| **API_GATEWAY_REST** | "powertools_json(body)" | -| **API_GATEWAY_HTTP** | "powertools_json(body)" | -| **SQS** | "Records[*].powertools_json(body)" | -| **SNS** | "Records[0].Sns.Message | powertools_json(@)" | -| **EVENTBRIDGE** | "detail" | -| **CLOUDWATCH_EVENTS_SCHEDULED** | "detail" | -| **KINESIS_DATA_STREAM** | "Records[*].kinesis.powertools_json(powertools_base64(data))" | -| **CLOUDWATCH_LOGS** | "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]" | +| Envelope | JMESPath expression | +| --------------------------------- | ------------------------------------------------------------------------ | +| **`API_GATEWAY_HTTP`** | `powertools_json(body)` | +| **`API_GATEWAY_REST`** | `powertools_json(body)` | +| **`CLOUDWATCH_EVENTS_SCHEDULED`** | `detail` | +| **`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` | +| **`EVENTBRIDGE`** | `detail` | +| **`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` | +| **`SNS`** | `Records[0].Sns.Message | powertools_json(@)` | +| **`SQS`** | `Records[*].powertools_json(body)` | ## Advanced diff --git a/docs/we_made_this.md b/docs/we_made_this.md index 950a8c9f24d..a9022b68e5d 100644 --- a/docs/we_made_this.md +++ b/docs/we_made_this.md @@ -133,3 +133,13 @@ This repository provides a working, deployable, open source based, AWS Lambda ha This handler embodies Serverless best practices and has all the bells and whistles for a proper production ready handler. It uses many of the AWS Lambda Powertools utilities for Python. 
:material-github: [github.com/ran-isenberg/aws-lambda-handler-cookbook](https://github.com/ran-isenberg/aws-lambda-handler-cookbook){:target="_blank"} + +### Serverless Transactional Message App + +> **Author: [Santiago Garcia Arango](mailto:san99tiago@gmail.com) [:material-web:](https://san99tiago.com/){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/san99tiago/){target="_blank"}** + +This repository contains a well documented example of a Transactional Messages App that illustrates how to use Lambda PowerTools to process SQS messages in batches (with IaC on top of CDK). + +It uses LambdaPowerTools Logger, Tracing, DataClasses and includes unit tests. + +:material-github: [github.com/san99tiago/aws-cdk-transactional-messages](https://github.com/san99tiago/aws-cdk-transactional-messages){:target="_blank"} diff --git a/examples/event_sources/src/debugging.py b/examples/event_sources/src/debugging.py new file mode 100644 index 00000000000..a03bf823885 --- /dev/null +++ b/examples/event_sources/src/debugging.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.utilities.data_classes import ( + CodePipelineJobEvent, + event_source, +) + + +@event_source(data_class=CodePipelineJobEvent) +def lambda_handler(event, context): + print(event) diff --git a/examples/event_sources/src/debugging_event.json b/examples/event_sources/src/debugging_event.json new file mode 100644 index 00000000000..a95c3d57e86 --- /dev/null +++ b/examples/event_sources/src/debugging_event.json @@ -0,0 +1,34 @@ +{ + "CodePipeline.job": { + "id": "11111111-abcd-1111-abcd-111111abcdef", + "accountId": "111111111111", + "data": { + "actionConfiguration": { + "configuration": { + "FunctionName": "MyLambdaFunctionForAWSCodePipeline", + "UserParameters": "some-input-such-as-a-URL" + } + }, + "inputArtifacts": [ + { + "name": "ArtifactName", + "revision": null, + "location": { + "type": "S3", + "s3Location": { + "bucketName": "the name of the bucket configured as the pipeline artifact store in 
Amazon S3, for example codepipeline-us-east-2-1234567890", + "objectKey": "the name of the application, for example CodePipelineDemoApplication.zip" + } + } + } + ], + "outputArtifacts": [], + "artifactCredentials": { + "accessKeyId": "AKIAIOSFODNN7EXAMPLE", + "secretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "sessionToken": "MIICiTCCAfICCQD6m7oRw0uXOjANBgkqhkiG9w0BAQUFADCBiDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAldBMRAwDgYDVQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6b24xFDASBgNVBAsTC0lBTSBDb25zb2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAdBgkqhkiG9w0BCQEWEG5vb25lQGFtYXpvbi5jb20wHhcNMTEwNDI1MjA0NTIxWhcNMTIwNDI0MjA0NTIxWjCBiDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAldBMRAwDgYDVQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6b24xFDASBgNVBAsTC0lBTSBDb25zb2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAdBgkqhkiG9w0BCQEWEG5vb25lQGFtYXpvbi5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMaK0dn+a4GmWIWJ21uUSfwfEvySWtC2XADZ4nB+BLYgVIk60CpiwsZ3G93vUEIO3IyNoH/f0wYK8m9TrDHudUZg3qX4waLG5M43q7Wgc/MbQITxOUSQv7c7ugFFDzQGBzZswY6786m86gpEIbb3OhjZnzcvQAaRHhdlQWIMm2nrAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEAtCu4nUhVVxYUntneD9+h8Mg9q6q+auNKyExzyLwaxlAoo7TJHidbtS4J5iNmZgXL0FkbFFBjvSfpJIlJ00zbhNYS5f6GuoEDmFJl0ZxBHjJnyp378OD8uTs7fLvjx79LjSTbNYiytVbZPQUQ5Yaxu2jXnimvw3rrszlaEXAMPLE=" + }, + "continuationToken": "A continuation token if continuing job" + } + } + } diff --git a/examples/event_sources/src/debugging_output.json b/examples/event_sources/src/debugging_output.json new file mode 100644 index 00000000000..f13d6380afe --- /dev/null +++ b/examples/event_sources/src/debugging_output.json @@ -0,0 +1,50 @@ +{ + "account_id":"111111111111", + "data":{ + "action_configuration":{ + "configuration":{ + "decoded_user_parameters":"[Cannot be deserialized]", + "function_name":"MyLambdaFunctionForAWSCodePipeline", + "raw_event":"[SENSITIVE]", + "user_parameters":"some-input-such-as-a-URL" + }, + "raw_event":"[SENSITIVE]" + }, + "artifact_credentials":{ + "access_key_id":"AKIAIOSFODNN7EXAMPLE", + "expiration_time":"None", + "raw_event":"[SENSITIVE]", + 
"secret_access_key":"[SENSITIVE]", + "session_token":"[SENSITIVE]" + }, + "continuation_token":"A continuation token if continuing job", + "encryption_key":"None", + "input_artifacts":[ + { + "location":{ + "get_type":"S3", + "raw_event":"[SENSITIVE]", + "s3_location":{ + "bucket_name":"the name of the bucket configured as the pipeline artifact store in Amazon S3, for example codepipeline-us-east-2-1234567890", + "key":"the name of the application, for example CodePipelineDemoApplication.zip", + "object_key":"the name of the application, for example CodePipelineDemoApplication.zip", + "raw_event":"[SENSITIVE]" + } + }, + "name":"ArtifactName", + "raw_event":"[SENSITIVE]", + "revision":"None" + } + ], + "output_artifacts":[ + + ], + "raw_event":"[SENSITIVE]" + }, + "decoded_user_parameters":"[Cannot be deserialized]", + "get_id":"11111111-abcd-1111-abcd-111111abcdef", + "input_bucket_name":"the name of the bucket configured as the pipeline artifact store in Amazon S3, for example codepipeline-us-east-2-1234567890", + "input_object_key":"the name of the application, for example CodePipelineDemoApplication.zip", + "raw_event":"[SENSITIVE]", + "user_parameters":"some-input-such-as-a-URL" + } diff --git a/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py b/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py index 31ae6cf268c..d078e396519 100644 --- a/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py +++ b/examples/jmespath_functions/src/extract_data_from_builtin_envelope.py @@ -1,12 +1,18 @@ +from __future__ import annotations + +from aws_lambda_powertools import Logger from aws_lambda_powertools.utilities.jmespath_utils import ( envelopes, extract_data_from_envelope, ) from aws_lambda_powertools.utilities.typing import LambdaContext +logger = Logger() + def handler(event: dict, context: LambdaContext) -> dict: - payload = extract_data_from_envelope(data=event, envelope=envelopes.SQS) - customer_id = 
payload.get("customerId") # now deserialized + records: list = extract_data_from_envelope(data=event, envelope=envelopes.SQS) + for record in records: # records is a list + logger.info(record.get("customerId")) # now deserialized - return {"customer_id": customer_id, "message": "success", "statusCode": 200} + return {"message": "success", "statusCode": 200} diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml index 546367280d4..c45f6cd5e3b 100644 --- a/examples/logger/sam/template.yaml +++ b/examples/logger/sam/template.yaml @@ -5,7 +5,7 @@ Description: AWS Lambda Powertools Tracer doc examples Globals: Function: Timeout: 5 - Runtime: python3.9 + Runtime: python3.10 Tracing: Active Environment: Variables: @@ -14,7 +14,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 Resources: LoggerLambdaHandlerExample: diff --git a/examples/logger/src/append_keys.py b/examples/logger/src/append_keys.py index 0ef9cbe0f63..3553f131ba0 100644 --- a/examples/logger/src/append_keys.py +++ b/examples/logger/src/append_keys.py @@ -4,7 +4,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: order_id = event.get("order_id") # this will ensure order_id key always has the latest value before logging diff --git a/examples/logger/src/append_keys_extra.py b/examples/logger/src/append_keys_extra.py index 0c66425f775..066bff9f084 100644 --- a/examples/logger/src/append_keys_extra.py +++ b/examples/logger/src/append_keys_extra.py @@ -4,7 +4,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: fields 
= {"request_id": "1123"} logger.info("Collecting payment", extra=fields) diff --git a/examples/logger/src/append_keys_kwargs.py b/examples/logger/src/append_keys_kwargs.py index 5885c7e2bd6..285e0c15471 100644 --- a/examples/logger/src/append_keys_kwargs.py +++ b/examples/logger/src/append_keys_kwargs.py @@ -4,7 +4,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.info("Collecting payment", request_id="1123") return "hello world" diff --git a/examples/logger/src/append_keys_vs_extra.py b/examples/logger/src/append_keys_vs_extra.py index ab67ceb6932..432dd1c23aa 100644 --- a/examples/logger/src/append_keys_vs_extra.py +++ b/examples/logger/src/append_keys_vs_extra.py @@ -12,7 +12,7 @@ class PaymentError(Exception): ... -def handler(event, context): +def lambda_handler(event, context): logger.append_keys(payment_id="123456789") charge_id = event.get("charge_id", "") diff --git a/examples/logger/src/bring_your_own_formatter_from_scratch.py b/examples/logger/src/bring_your_own_formatter_from_scratch.py index c591b421cc6..9b616595824 100644 --- a/examples/logger/src/bring_your_own_formatter_from_scratch.py +++ b/examples/logger/src/bring_your_own_formatter_from_scratch.py @@ -39,5 +39,5 @@ def format(self, record: logging.LogRecord) -> str: # noqa: A003 @logger.inject_lambda_context -def handler(event, context): +def lambda_handler(event, context): logger.info("Collecting payment") diff --git a/examples/logger/src/clear_state.py b/examples/logger/src/clear_state.py index ec842f034c1..4cfa44a3ae8 100644 --- a/examples/logger/src/clear_state.py +++ b/examples/logger/src/clear_state.py @@ -5,7 +5,7 @@ @logger.inject_lambda_context(clear_state=True) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: if event.get("special_key"): # Should only be available in the first request log # as the second request 
doesn't contain `special_key` diff --git a/examples/logger/src/enabling_boto_logging.py b/examples/logger/src/enabling_boto_logging.py index cce8dc6f8e7..0a5b953c603 100644 --- a/examples/logger/src/enabling_boto_logging.py +++ b/examples/logger/src/enabling_boto_logging.py @@ -12,7 +12,7 @@ client = boto3.client("s3") -def handler(event: Dict, context: LambdaContext) -> List: +def lambda_handler(event: Dict, context: LambdaContext) -> List: response = client.list_buckets() return response.get("Buckets", []) diff --git a/examples/logger/src/fake_lambda_context_for_logger_module.py b/examples/logger/src/fake_lambda_context_for_logger_module.py index fcb94f99db1..f5140febaba 100644 --- a/examples/logger/src/fake_lambda_context_for_logger_module.py +++ b/examples/logger/src/fake_lambda_context_for_logger_module.py @@ -5,7 +5,7 @@ @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.info("Collecting payment") return "hello world" diff --git a/examples/logger/src/inject_lambda_context.py b/examples/logger/src/inject_lambda_context.py index 0bdf203565d..b55133ef59b 100644 --- a/examples/logger/src/inject_lambda_context.py +++ b/examples/logger/src/inject_lambda_context.py @@ -5,7 +5,7 @@ @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.info("Collecting payment") # You can log entire objects too diff --git a/examples/logger/src/log_incoming_event.py b/examples/logger/src/log_incoming_event.py index 264a568c4ba..fb962b063c6 100644 --- a/examples/logger/src/log_incoming_event.py +++ b/examples/logger/src/log_incoming_event.py @@ -5,5 +5,5 @@ @logger.inject_lambda_context(log_event=True) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: return "hello world" diff --git 
a/examples/logger/src/logger_reuse.py b/examples/logger/src/logger_reuse.py index a232eadd979..db2f8a90d8b 100644 --- a/examples/logger/src/logger_reuse.py +++ b/examples/logger/src/logger_reuse.py @@ -7,7 +7,7 @@ @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: inject_payment_id(context=event) logger.info("Collecting payment") return "hello world" diff --git a/examples/logger/src/logging_exceptions.py b/examples/logger/src/logging_exceptions.py index 31df43cd663..05e5c1a1e15 100644 --- a/examples/logger/src/logging_exceptions.py +++ b/examples/logger/src/logging_exceptions.py @@ -7,7 +7,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: try: ret = requests.get(ENDPOINT) ret.raise_for_status() diff --git a/examples/logger/src/logging_inheritance_bad.py b/examples/logger/src/logging_inheritance_bad.py index 18510720d9e..0df805ccd21 100644 --- a/examples/logger/src/logging_inheritance_bad.py +++ b/examples/logger/src/logging_inheritance_bad.py @@ -10,7 +10,7 @@ @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: inject_payment_id(context=event) return "hello world" diff --git a/examples/logger/src/logging_inheritance_good.py b/examples/logger/src/logging_inheritance_good.py index f7e29d09df7..6fc9a4e0d5d 100644 --- a/examples/logger/src/logging_inheritance_good.py +++ b/examples/logger/src/logging_inheritance_good.py @@ -10,7 +10,7 @@ @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: inject_payment_id(context=event) return "hello world" diff --git a/examples/logger/src/logging_uncaught_exceptions.py b/examples/logger/src/logging_uncaught_exceptions.py index 
1b43c67914a..53c8908ce01 100644 --- a/examples/logger/src/logging_uncaught_exceptions.py +++ b/examples/logger/src/logging_uncaught_exceptions.py @@ -7,7 +7,7 @@ logger = Logger(log_uncaught_exceptions=True) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: ret = requests.get(ENDPOINT) # HTTP 4xx/5xx status will lead to requests.HTTPError # Logger will log this exception before this program exits non-successfully diff --git a/examples/logger/src/observability_provider_builtin_formatters.py b/examples/logger/src/observability_provider_builtin_formatters.py new file mode 100644 index 00000000000..3817f1f1b55 --- /dev/null +++ b/examples/logger/src/observability_provider_builtin_formatters.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.formatters.datadog import DatadogLogFormatter + +logger = Logger(service="payment", logger_formatter=DatadogLogFormatter()) +logger.info("hello") diff --git a/examples/logger/src/remove_keys.py b/examples/logger/src/remove_keys.py index 763387d9399..5f48142564b 100644 --- a/examples/logger/src/remove_keys.py +++ b/examples/logger/src/remove_keys.py @@ -4,7 +4,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.append_keys(sample_key="value") logger.info("Collecting payment") diff --git a/examples/logger/src/sampling_debug_logs.py b/examples/logger/src/sampling_debug_logs.py index 3bbb1cdb920..042c1f4a54a 100644 --- a/examples/logger/src/sampling_debug_logs.py +++ b/examples/logger/src/sampling_debug_logs.py @@ -6,7 +6,7 @@ logger = Logger(service="payment", sample_rate=0.1) -def handler(event: dict, context: LambdaContext): +def lambda_handler(event: dict, context: LambdaContext): logger.debug("Verifying whether order_id is present") logger.info("Collecting payment") diff --git 
a/examples/logger/src/set_correlation_id.py b/examples/logger/src/set_correlation_id.py index 3aa0bc5f2be..eacae388d83 100644 --- a/examples/logger/src/set_correlation_id.py +++ b/examples/logger/src/set_correlation_id.py @@ -5,7 +5,7 @@ @logger.inject_lambda_context(correlation_id_path="headers.my_request_id_header") -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.debug(f"Correlation ID => {logger.get_correlation_id()}") logger.info("Collecting payment") diff --git a/examples/logger/src/set_correlation_id_jmespath.py b/examples/logger/src/set_correlation_id_jmespath.py index 049bc70a957..386115c9d2d 100644 --- a/examples/logger/src/set_correlation_id_jmespath.py +++ b/examples/logger/src/set_correlation_id_jmespath.py @@ -6,7 +6,7 @@ @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: logger.debug(f"Correlation ID => {logger.get_correlation_id()}") logger.info("Collecting payment") diff --git a/examples/logger/src/set_correlation_id_method.py b/examples/logger/src/set_correlation_id_method.py index 74eaa338df6..5aca939e067 100644 --- a/examples/logger/src/set_correlation_id_method.py +++ b/examples/logger/src/set_correlation_id_method.py @@ -5,7 +5,7 @@ logger = Logger() -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: request = APIGatewayProxyEvent(event) logger.set_correlation_id(request.request_context.request_id) diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml index 14bb92be061..9f2784680e5 100644 --- a/examples/metrics/sam/template.yaml +++ b/examples/metrics/sam/template.yaml @@ -5,7 +5,7 @@ Description: AWS Lambda Powertools Metrics doc examples Globals: Function: Timeout: 5 - Runtime: python3.9 + Runtime: 
python3.10 Tracing: Active Environment: Variables: @@ -15,7 +15,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 Resources: CaptureLambdaHandlerExample: diff --git a/examples/metrics/src/manual_flush.py b/examples/metrics/src/flush_metrics.py similarity index 62% rename from examples/metrics/src/manual_flush.py rename to examples/metrics/src/flush_metrics.py index def0f845d08..a66ce07cbf7 100644 --- a/examples/metrics/src/manual_flush.py +++ b/examples/metrics/src/flush_metrics.py @@ -1,5 +1,3 @@ -import json - from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit from aws_lambda_powertools.utilities.typing import LambdaContext @@ -7,8 +5,14 @@ metrics = Metrics() -def lambda_handler(event: dict, context: LambdaContext): +def book_flight(flight_id: str, **kwargs): + # logic to book flight + ... 
metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) - your_metrics_object = metrics.serialize_metric_set() - metrics.clear_metrics() - print(json.dumps(your_metrics_object)) + + +def lambda_handler(event: dict, context: LambdaContext): + try: + book_flight(flight_id=event.get("flight_id", "")) + finally: + metrics.flush_metrics() diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml index 53f4277967a..788c007cd86 100644 --- a/examples/tracer/sam/template.yaml +++ b/examples/tracer/sam/template.yaml @@ -5,7 +5,7 @@ Description: AWS Lambda Powertools Tracer doc examples Globals: Function: Timeout: 5 - Runtime: python3.9 + Runtime: python3.10 Tracing: Active Environment: Variables: @@ -13,7 +13,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:31 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:32 Resources: CaptureLambdaHandlerExample: diff --git a/examples/tracer/src/capture_lambda_handler.py b/examples/tracer/src/capture_lambda_handler.py index f5d2c1efcea..8ca2503076a 100644 --- a/examples/tracer/src/capture_lambda_handler.py +++ b/examples/tracer/src/capture_lambda_handler.py @@ -10,6 +10,6 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/capture_method.py b/examples/tracer/src/capture_method.py index edf1ed719f4..da50356b56c 100644 --- a/examples/tracer/src/capture_method.py +++ b/examples/tracer/src/capture_method.py @@ -11,6 +11,6 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def 
handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/capture_method_async.py b/examples/tracer/src/capture_method_async.py index e142ef8f163..a01926ab1e1 100644 --- a/examples/tracer/src/capture_method_async.py +++ b/examples/tracer/src/capture_method_async.py @@ -14,6 +14,6 @@ async def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return asyncio.run(collect_payment(charge_id=charge_id)) diff --git a/examples/tracer/src/capture_method_async_concurrency.py b/examples/tracer/src/capture_method_async_concurrency.py index 82e89070c75..0afd965d35e 100644 --- a/examples/tracer/src/capture_method_async_concurrency.py +++ b/examples/tracer/src/capture_method_async_concurrency.py @@ -26,6 +26,6 @@ async def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return asyncio.run(collect_payment(charge_id=charge_id)) diff --git a/examples/tracer/src/capture_method_context_manager.py b/examples/tracer/src/capture_method_context_manager.py index 083443607ac..9dc6716e69e 100644 --- a/examples/tracer/src/capture_method_context_manager.py +++ b/examples/tracer/src/capture_method_context_manager.py @@ -19,7 +19,7 @@ def collect_payment(charge_id: str) -> Generator[str, None, None]: @tracer.capture_lambda_handler @logger.inject_lambda_context -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") with collect_payment(charge_id=charge_id) 
as receipt_id: logger.info(f"Processing payment collection for charge {charge_id} with receipt {receipt_id}") diff --git a/examples/tracer/src/capture_method_generators.py b/examples/tracer/src/capture_method_generators.py index 65b87c251e8..7c02f810a27 100644 --- a/examples/tracer/src/capture_method_generators.py +++ b/examples/tracer/src/capture_method_generators.py @@ -12,6 +12,6 @@ def collect_payment(charge_id: str) -> Generator[str, None, None]: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return next(collect_payment(charge_id=charge_id)) diff --git a/examples/tracer/src/disable_capture_error.py b/examples/tracer/src/disable_capture_error.py index 7b7d7e6ad23..59fc2d2376a 100644 --- a/examples/tracer/src/disable_capture_error.py +++ b/examples/tracer/src/disable_capture_error.py @@ -24,7 +24,7 @@ def collect_payment(charge_id: str) -> dict: @tracer.capture_lambda_handler(capture_error=False) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") ret = collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/disable_capture_response.py b/examples/tracer/src/disable_capture_response.py index ffe8230eece..7e3554c4d81 100644 --- a/examples/tracer/src/disable_capture_response.py +++ b/examples/tracer/src/disable_capture_response.py @@ -13,6 +13,6 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler(capture_response=False) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/disable_capture_response_streaming_body.py b/examples/tracer/src/disable_capture_response_streaming_body.py index 
3e458a98eb4..fe9b74713d1 100644 --- a/examples/tracer/src/disable_capture_response_streaming_body.py +++ b/examples/tracer/src/disable_capture_response_streaming_body.py @@ -24,7 +24,7 @@ def fetch_payment_report(payment_id: str) -> StreamingBody: @tracer.capture_lambda_handler(capture_response=False) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: payment_id = event.get("payment_id", "") report = fetch_payment_report(payment_id=payment_id) return report.read().decode() diff --git a/examples/tracer/src/ignore_endpoints.py b/examples/tracer/src/ignore_endpoints.py index 6484cfcf5b0..0fe256aeee9 100644 --- a/examples/tracer/src/ignore_endpoints.py +++ b/examples/tracer/src/ignore_endpoints.py @@ -28,7 +28,7 @@ def collect_payment(charge_id: str) -> dict: @tracer.capture_lambda_handler(capture_error=False) -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") ret = collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/patch_modules.py b/examples/tracer/src/patch_modules.py index 09e7092a85a..e547951d746 100644 --- a/examples/tracer/src/patch_modules.py +++ b/examples/tracer/src/patch_modules.py @@ -9,7 +9,7 @@ @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: ret = requests.get("https://httpbin.org/get") ret.raise_for_status() diff --git a/examples/tracer/src/put_trace_annotations.py b/examples/tracer/src/put_trace_annotations.py index 0d9455c7acd..5c31b384e9d 100644 --- a/examples/tracer/src/put_trace_annotations.py +++ b/examples/tracer/src/put_trace_annotations.py @@ -10,6 +10,6 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) 
-> str: charge_id = event.get("charge_id", "") return collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/put_trace_metadata.py b/examples/tracer/src/put_trace_metadata.py index 23b6753677c..cf53b867b37 100644 --- a/examples/tracer/src/put_trace_metadata.py +++ b/examples/tracer/src/put_trace_metadata.py @@ -9,7 +9,7 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: payment_context = { "charge_id": event.get("charge_id", ""), "merchant_id": event.get("merchant_id", ""), diff --git a/examples/tracer/src/sdk_escape_hatch.py b/examples/tracer/src/sdk_escape_hatch.py index 7f046caff9d..e7024016697 100644 --- a/examples/tracer/src/sdk_escape_hatch.py +++ b/examples/tracer/src/sdk_escape_hatch.py @@ -9,7 +9,7 @@ def collect_payment(charge_id: str) -> str: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") with tracer.provider.in_subsegment("## collect_payment") as subsegment: subsegment.put_annotation(key="PaymentId", value=charge_id) diff --git a/examples/tracer/src/tracer_reuse.py b/examples/tracer/src/tracer_reuse.py index bdfe7bc9d91..d38f22f6871 100644 --- a/examples/tracer/src/tracer_reuse.py +++ b/examples/tracer/src/tracer_reuse.py @@ -7,6 +7,6 @@ @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> str: +def lambda_handler(event: dict, context: LambdaContext) -> str: charge_id = event.get("charge_id", "") return collect_payment(charge_id=charge_id) diff --git a/examples/tracer/src/tracing_aiohttp.py b/examples/tracer/src/tracing_aiohttp.py index 45fe6a46f38..7fb54a8502c 100644 --- a/examples/tracer/src/tracing_aiohttp.py +++ b/examples/tracer/src/tracing_aiohttp.py @@ -20,6 +20,6 @@ async def collect_payment(charge_id: str) -> 
dict: @tracer.capture_lambda_handler -def handler(event: dict, context: LambdaContext) -> dict: +def lambda_handler(event: dict, context: LambdaContext) -> dict: charge_id = event.get("charge_id", "") return asyncio.run(collect_payment(charge_id=charge_id)) diff --git a/package-lock.json b/package-lock.json index a206f7a7b7b..944c28b98a2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,13 +8,13 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.76.0" + "aws-cdk": "^2.77.0" } }, "node_modules/aws-cdk": { - "version": "2.76.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.76.0.tgz", - "integrity": "sha512-y6VHtqUpYenn6mGIBFbcGGXIoXfKA3o0eGL/eeD/gUJ9TcPrgMLQM1NxSMb5JVsOk5BPPXzGmvB0gBu40utGqg==", + "version": "2.77.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.77.0.tgz", + "integrity": "sha512-f0UpWjBxrFkINqlwL50OpIIC03V39hTzg4+NEBlfUc/ftFX8WQQYyT6h29IfxT9Tgo+YoEMlM1nnH/s1c+VKSw==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -43,9 +43,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.76.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.76.0.tgz", - "integrity": "sha512-y6VHtqUpYenn6mGIBFbcGGXIoXfKA3o0eGL/eeD/gUJ9TcPrgMLQM1NxSMb5JVsOk5BPPXzGmvB0gBu40utGqg==", + "version": "2.77.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.77.0.tgz", + "integrity": "sha512-f0UpWjBxrFkINqlwL50OpIIC03V39hTzg4+NEBlfUc/ftFX8WQQYyT6h29IfxT9Tgo+YoEMlM1nnH/s1c+VKSw==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 9536d277acb..0a074487100 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,6 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.76.0" + "aws-cdk": "^2.77.0" } } diff --git a/poetry.lock b/poetry.lock index 33c837df909..c0ea10b2660 100644 --- a/poetry.lock +++ b/poetry.lock @@ -189,14 +189,14 @@ requests = ">=0.14.0" [[package]] name = 
"aws-sam-translator" -version = "1.64.0" +version = "1.66.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.64.0.tar.gz", hash = "sha256:0cc5b07dd6ef1de3525d887a3b9557168e04cb44327706a43661653bad30687f"}, - {file = "aws_sam_translator-1.64.0-py3-none-any.whl", hash = "sha256:c44725f12b05d4881e3bc077f70e23ebce56ea78c729acf0ca9f51302b27d304"}, + {file = "aws-sam-translator-1.66.0.tar.gz", hash = "sha256:0b9e9684ea0384fd84f5e722f7fea61896c514b95d3403aa782b69acd485dbbf"}, + {file = "aws_sam_translator-1.66.0-py3-none-any.whl", hash = "sha256:dc4f38cd7ce2a4875d943bf10ba0745901a3a7b7fec1e40b8d13072641630c58"}, ] [package.dependencies] @@ -206,7 +206,7 @@ pydantic = ">=1.8,<2.0" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.254)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.1.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.1.0,<1.2.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.261)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = 
"aws-xray-sdk" @@ -370,18 +370,18 @@ files = [ [[package]] name = "cfn-lint" -version = "0.77.2" +version = "0.77.4" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.77.2.tar.gz", hash = "sha256:a720fdbd68b7ada0fcef2ee65fc17c67f5dbd03797d9117eee7c18bb2cb49a2c"}, - {file = "cfn_lint-0.77.2-py3-none-any.whl", hash = "sha256:d1b508824ed47d622dee07f270f04a7cbbe05d2230d7bfb10641964e6d65500a"}, + {file = "cfn-lint-0.77.4.tar.gz", hash = "sha256:0aa67e28c992b84ad286539de59a9185f51d721d54ad539f6afe1b477836d8cd"}, + {file = "cfn_lint-0.77.4-py3-none-any.whl", hash = "sha256:b348589be12c12dc5ab4ba801fb430f441bffe76e5ffdf907088abcbeb74271d"}, ] [package.dependencies] -aws-sam-translator = ">=1.64.0" +aws-sam-translator = ">=1.65.0" jschema-to-python = ">=1.2.3,<1.3.0" jsonpatch = "*" jsonschema = ">=3.0,<5" @@ -536,63 +536,63 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "coverage" -version = "7.2.3" +version = "7.2.5" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, - {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, - {file = 
"coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, - {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, - {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", 
hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, - {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, - {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, - {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, - {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, - {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash 
= "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, - {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, - {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, - {file = 
"coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, - {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, - {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, - {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, - {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, + {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, + {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, + {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, + {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, + {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, + {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, + {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, + {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, + {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, + {file = 
"coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, + {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, + {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, + {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, + {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, ] [package.dependencies] @@ -1091,14 +1091,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.5.0" +version = "6.6.0" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.5.0-py3-none-any.whl", hash = "sha256:03ba783c3a2c69d751b109fc0c94a62c51f581b3d6acf8ed1331b6d5729321ff"}, - {file = "importlib_metadata-6.5.0.tar.gz", hash = 
"sha256:7a8bdf1bc3a726297f5cfbc999e6e7ff6b4fa41b26bba4afc580448624460045"}, + {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, + {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, ] [package.dependencies] @@ -1574,14 +1574,14 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.6" +version = "9.1.9" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.6-py3-none-any.whl", hash = "sha256:f2eb1d40db89da9922944833c1387207408f8937e1c2b46ab86e0c8f170b71e0"}, - {file = "mkdocs_material-9.1.6.tar.gz", hash = "sha256:2e555152f9771646bfa62dc78a86052876183eff69ce30db03a33e85702b21fc"}, + {file = "mkdocs_material-9.1.9-py3-none-any.whl", hash = "sha256:7db24261cb17400e132c46d17eea712bfe71056d892a9beba32cf68210297141"}, + {file = "mkdocs_material-9.1.9.tar.gz", hash = "sha256:74d8da1371ab3a326868fe47bae3cbc4aa22e93c048b4ca5117e6817b88bd734"}, ] [package.dependencies] @@ -1750,14 +1750,14 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-lambda" -version = "1.26.115" -description = "Type annotations for boto3.Lambda 1.26.115 service generated with mypy-boto3-builder 7.14.5" +version = "1.26.122" +description = "Type annotations for boto3.Lambda 1.26.122 service generated with mypy-boto3-builder 7.14.5" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.26.115.tar.gz", hash = "sha256:f612eca8f0e418e66d577b5609f0119c4934a7637ce1342d3c1cfc0d065cd42d"}, - {file = "mypy_boto3_lambda-1.26.115-py3-none-any.whl", hash = "sha256:0d418bb0d6c16c6a83e159dae71f8c6dff663c50dab125bb1518bf6c29f2ed83"}, + {file = "mypy-boto3-lambda-1.26.122.tar.gz", hash = 
"sha256:b46d153f69b407c76d17ba97390fad9285215fa3dcf484bb7d6ffa0880fca746"}, + {file = "mypy_boto3_lambda-1.26.122-py3-none-any.whl", hash = "sha256:2839045d23b48f7a99ac3fdc71bcecde15998245c6e515b1cf2e8f589a9ae23e"}, ] [package.dependencies] @@ -1825,18 +1825,18 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-xray" -version = "1.26.11.post1" -description = "Type annotations for boto3.XRay 1.26.11 service generated with mypy-boto3-builder 7.11.10" +version = "1.26.122" +description = "Type annotations for boto3.XRay 1.26.122 service generated with mypy-boto3-builder 7.14.5" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-xray-1.26.11.post1.tar.gz", hash = "sha256:e720a766571b4f3e1e35193de74e6dfae21b0f641286aa39f7b938be43150ac0"}, - {file = "mypy_boto3_xray-1.26.11.post1-py3-none-any.whl", hash = "sha256:e22bf1e7fa6a43c52ff6cd3e82892674929b75bb62ab3b0fe3d4c91ede710e1f"}, + {file = "mypy-boto3-xray-1.26.122.tar.gz", hash = "sha256:0231b717443e6eafe4ff689423c5b54a5ee47682f7c19d3f66bfe7e00e87bf7c"}, + {file = "mypy_boto3_xray-1.26.122-py3-none-any.whl", hash = "sha256:6cfe3167ecb92942519d8334bb145d83a6d727bdb7b812c1d9692f938caf6a7f"}, ] [package.dependencies] -typing-extensions = ">=4.1.0" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-extensions" @@ -2798,14 +2798,14 @@ files = [ [[package]] name = "types-requests" -version = "2.28.11.17" +version = "2.29.0.0" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"}, - {file = "types_requests-2.28.11.17-py3-none-any.whl", hash = "sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"}, + {file = "types-requests-2.29.0.0.tar.gz", hash = "sha256:c86f4a955d943d2457120dbe719df24ef0924e11177164d10a0373cf311d7b4d"}, 
+ {file = "types_requests-2.29.0.0-py3-none-any.whl", hash = "sha256:4cf6e323e856c779fbe8815bb977a5bf5d6c5034713e4c17ff2a9a20610f5b27"}, ] [package.dependencies] @@ -3035,4 +3035,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "d802cb3b02f4b58fb01497d07d26f679cf4ad3df9c0eaaed86cf9b6472453680" +content-hash = "302a00b68b6e28a6fd1258bb3c093daca2547a71586b0b44a5e162351b39bfe2" diff --git a/pyproject.toml b/pyproject.toml index 1ee2845cdd1..365cc2e80d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.14.1" +version = "2.15.0" description = "AWS Lambda Powertools is a developer toolkit to implement Serverless best practices and increase developer velocity." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] @@ -74,19 +74,19 @@ mypy-boto3-appconfig = "^1.26.71" mypy-boto3-cloudformation = "^1.26.108" mypy-boto3-cloudwatch = "^1.26.99" mypy-boto3-dynamodb = "^1.26.115" -mypy-boto3-lambda = "^1.26.115" +mypy-boto3-lambda = "^1.26.122" mypy-boto3-logs = "^1.26.53" mypy-boto3-secretsmanager = "^1.26.116" mypy-boto3-ssm = "^1.26.97" mypy-boto3-s3 = "^1.26.116" -mypy-boto3-xray = "^1.26.11" -types-requests = "^2.28.11" +mypy-boto3-xray = "^1.26.122" +types-requests = "^2.29.0" typing-extensions = "^4.4.0" -mkdocs-material = "^9.1.6" +mkdocs-material = "^9.1.9" filelock = "^3.12.0" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.26.70" -importlib-metadata = "^6.5" +importlib-metadata = "^6.6" ijson = "^3.2.0" typed-ast = { version = "^1.5.4", python = "< 3.8"} hvac = "^1.1.0" @@ -101,7 +101,7 @@ all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] aws-sdk = ["boto3"] [tool.poetry.group.dev.dependencies] -cfn-lint = "0.77.2" +cfn-lint = "0.77.4" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" diff --git a/tests/events/s3SqsEvent.json 
b/tests/events/s3SqsEvent.json new file mode 100644 index 00000000000..55863af12b0 --- /dev/null +++ b/tests/events/s3SqsEvent.json @@ -0,0 +1,22 @@ +{ + "Records":[ + { + "messageId":"ca3e7a89-c358-40e5-8aa0-5da01403c267", + "receiptHandle":"AQEBE7XoI7IQRLF7SrpiW9W4BanmOWe8UtVDbv6/CEZYKf/OktSNIb4j689tQfR4k44V/LY20lZ5VpxYt2GTYCsSLKTcBalTJaRX9CKu/hVqy/23sSNiKxnP56D+VLSn+hU275+AP1h4pUL0d9gLdRB2haX8xiM+LcGfis5Jl8BBXtoxKRF60O87O9/NvCmmXLeqkJuexfyEZNyed0fFCRXFXSjbmThG0OIQgcrGI8glBRGPA8htns58VtXFsSaPYNoqP3p5n6+ewKKVLD0lfm+0DlnLKRa+mjvFBaSer9KK1ff+Aq6zJ6HynPwADj+aF70Hwimc2zImYe51SLEF/E2csYlMNZYI/2qXW0m9R7wJ/XDTV4g2+h+BMTxsKnJQ6NQd", + "body":"{\"Records\":[{\"eventVersion\":\"2.1\",\"eventSource\":\"aws:s3\",\"awsRegion\":\"us-east-1\",\"eventTime\":\"2023-04-12T20:43:38.021Z\",\"eventName\":\"ObjectCreated:Put\",\"userIdentity\":{\"principalId\":\"A1YQ72UWCM96UF\"},\"requestParameters\":{\"sourceIPAddress\":\"93.108.161.96\"},\"responseElements\":{\"x-amz-request-id\":\"YMSSR8BZJ2Y99K6P\",\"x-amz-id-2\":\"6ASrUfj5xpn859fIq+6FXflOex/SKl/rjfiMd7wRzMg/zkHKR22PDpnh7KD3uq//cuOTbdX4DInN5eIs+cR0dY1z2Mc5NDP/\"},\"s3\":{\"s3SchemaVersion\":\"1.0\",\"configurationId\":\"SNS\",\"bucket\":{\"name\":\"xxx\",\"ownerIdentity\":{\"principalId\":\"A1YQ72UWCM96UF\"},\"arn\":\"arn:aws:s3:::xxx\"},\"object\":{\"key\":\"test.pdf\",\"size\":104681,\"eTag\":\"2e3ad1e983318bbd8e73b080e2997980\",\"versionId\":\"yd3d4HaWOT2zguDLvIQLU6ptDTwKBnQV\",\"sequencer\":\"00643717F9F8B85354\"}}}]}", + "attributes":{ + "ApproximateReceiveCount":"1", + "SentTimestamp":"1681332219270", + "SenderId":"AIDAJHIPRHEMV73VRJEBU", + "ApproximateFirstReceiveTimestamp":"1681332239270" + }, + "messageAttributes":{ + + }, + "md5OfBody":"16f4460f4477d8d693a5abe94fdbbd73", + "eventSource":"aws:sqs", + "eventSourceARN":"arn:aws:sqs:us-east-1:123456789012:SQS", + "awsRegion":"us-east-1" + } + ] + } diff --git a/tests/events/sqsEvent.json b/tests/events/sqsEvent.json index ef03b128943..2bfcd1c7b8f 100644 --- 
a/tests/events/sqsEvent.json +++ b/tests/events/sqsEvent.json @@ -25,7 +25,7 @@ { "messageId": "2e1424d4-f796-459a-8184-9c92662be6da", "receiptHandle": "AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...", - "body": "Test message2.", + "body": "{\"message\": \"foo1\"}", "attributes": { "ApproximateReceiveCount": "1", "SentTimestamp": "1545082650636", @@ -39,4 +39,4 @@ "awsRegion": "us-east-2" } ] -} \ No newline at end of file +} diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py index cd903f3052c..f6ed3a5422e 100644 --- a/tests/functional/parser/test_s3.py +++ b/tests/functional/parser/test_s3.py @@ -6,8 +6,7 @@ from tests.functional.utils import load_event -@event_parser(model=S3Model) -def handle_s3(event: S3Model, _: LambdaContext): +def assert_s3(event: S3Model): records = list(event.Records) assert len(records) == 1 record: S3RecordModel = records[0] @@ -41,6 +40,11 @@ def handle_s3(event: S3Model, _: LambdaContext): assert record.glacierEventData is None +@event_parser(model=S3Model) +def handle_s3(event: S3Model, _: LambdaContext): + assert_s3(event) + + @event_parser(model=S3Model) def handle_s3_glacier(event: S3Model, _: LambdaContext): records = list(event.Records) diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 37b934d478e..b3a24b0865a 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -113,6 +113,47 @@ def message(self) -> str: assert DataClassSample(data1).raw_event is data1 +def test_dict_wrapper_with_default_custom_json_deserializer(): + class DataClassSample(DictWrapper): + @property + def json_body(self) -> dict: + return self._json_deserializer(self["body"]) + + data = {"body": '{"message": "foo1"}'} + event = DataClassSample(data=data) + assert event.json_body == json.loads(data["body"]) + + +def test_dict_wrapper_with_valid_custom_json_deserializer(): + class DataClassSample(DictWrapper): + @property + def json_body(self) -> 
dict: + return self._json_deserializer(self["body"]) + + def fake_json_deserializer(record: dict): + return json.loads(record) + + data = {"body": '{"message": "foo1"}'} + event = DataClassSample(data=data, json_deserializer=fake_json_deserializer) + assert event.json_body == json.loads(data["body"]) + + +def test_dict_wrapper_with_invalid_custom_json_deserializer(): + class DataClassSample(DictWrapper): + @property + def json_body(self) -> dict: + return self._json_deserializer(self["body"]) + + def fake_json_deserializer() -> None: + # invalid fn signature should raise TypeError + pass + + data = {"body": {"message": "foo1"}} + with pytest.raises(TypeError): + event = DataClassSample(data=data, json_deserializer=fake_json_deserializer) + assert event.json_body == {"message": "foo1"} + + def test_dict_wrapper_implements_mapping(): class DataClassSample(DictWrapper): pass @@ -126,6 +167,150 @@ class DataClassSample(DictWrapper): assert event_source.items() == data.items() +def test_dict_wrapper_str_no_property(): + """ + Checks that the _properties function returns + only the "raw_event", and the resulting string + notes it as sensitive. + """ + + class DataClassSample(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + event_source = DataClassSample({}) + assert str(event_source) == "{'raw_event': '[SENSITIVE]'}" + + +def test_dict_wrapper_str_single_property(): + """ + Checks that the _properties function returns + the defined property "data_property", and + resulting string includes the property value. 
+ """ + + class DataClassSample(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + @property + def data_property(self) -> str: + return "value" + + event_source = DataClassSample({}) + assert str(event_source) == "{'data_property': 'value', 'raw_event': '[SENSITIVE]'}" + + +def test_dict_wrapper_str_property_exception(): + """ + Check the recursive _str_helper function handles + exceptions that may occur when accessing properties + """ + + class DataClassSample(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + @property + def data_property(self): + raise Exception() + + event_source = DataClassSample({}) + assert str(event_source) == "{'data_property': '[Cannot be deserialized]', 'raw_event': '[SENSITIVE]'}" + + +def test_dict_wrapper_str_property_list_exception(): + """ + Check that _str_helper properly handles exceptions + that occur when recursively working through items + in a list property. + """ + + class BrokenDataClass(DictWrapper): + @property + def broken_data_property(self): + raise Exception() + + class DataClassSample(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + @property + def data_property(self) -> list: + return ["string", 0, 0.0, BrokenDataClass({})] + + event_source = DataClassSample({}) + event_str = ( + "{'data_property': ['string', 0, 0.0, {'broken_data_property': " + + "'[Cannot be deserialized]', 'raw_event': '[SENSITIVE]'}], 'raw_event': '[SENSITIVE]'}" + ) + assert str(event_source) == event_str + + +def test_dict_wrapper_str_recursive_property(): + """ + Check that the _str_helper function recursively + handles Data Classes within Data Classes + """ + + class DataClassTerminal(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + @property + def terminal_property(self) -> str: + return "end-recursion" + + class DataClassRecursive(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + @property + def 
data_property(self) -> DataClassTerminal: + return DataClassTerminal({}) + + event_source = DataClassRecursive({}) + assert ( + str(event_source) + == "{'data_property': {'raw_event': '[SENSITIVE]', 'terminal_property': 'end-recursion'}," + + " 'raw_event': '[SENSITIVE]'}" + ) + + +def test_dict_wrapper_sensitive_properties_property(): + """ + Checks that the _str_helper function correctly + handles _sensitive_properties + """ + + class DataClassSample(DictWrapper): + attribute = None + + def function(self) -> None: + pass + + _sensitive_properties = ["data_property"] + + @property + def data_property(self) -> str: + return "value" + + event_source = DataClassSample({}) + assert str(event_source) == "{'data_property': '[SENSITIVE]', 'raw_event': '[SENSITIVE]'}" + + def test_cloud_watch_dashboard_event(): event = CloudWatchDashboardCustomWidgetEvent(load_event("cloudWatchDashboardEvent.json")) assert event.describe is False @@ -782,6 +967,9 @@ def test_seq_trigger_event(): assert record.queue_url == "https://sqs.us-east-2.amazonaws.com/123456789012/my-queue" assert record.aws_region == "us-east-2" + record_2 = records[1] + assert record_2.json_body == {"message": "foo1"} + def test_default_api_gateway_proxy_event(): event = APIGatewayProxyEvent(load_event("apiGatewayProxyEvent_noVersionAuth.json")) diff --git a/tests/functional/test_logger_powertools_formatter.py b/tests/functional/test_logger_powertools_formatter.py index 7276f49d487..8b874894e27 100644 --- a/tests/functional/test_logger_powertools_formatter.py +++ b/tests/functional/test_logger_powertools_formatter.py @@ -3,12 +3,14 @@ import json import os import random +import re import string import time import pytest from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.formatters.datadog import DatadogLogFormatter @pytest.fixture @@ -22,6 +24,10 @@ def service_name(): return "".join(random.SystemRandom().choice(chars) for _ in range(15)) +def capture_logging_output(stdout): + return 
json.loads(stdout.getvalue().strip()) + + @pytest.mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"]) def test_setup_with_valid_log_levels(stdout, level, service_name): logger = Logger(service=service_name, level=level, stream=stdout, request_id="request id!", another="value") @@ -309,3 +315,17 @@ def test_log_json_pretty_indent(stdout, service_name, monkeypatch): # THEN the json should contain more than line new_lines = stdout.getvalue().count(os.linesep) assert new_lines > 1 + + +def test_datadog_formatter_use_rfc3339_date(stdout, service_name): + # GIVEN Datadog Log Formatter is used + logger = Logger(service=service_name, stream=stdout, logger_formatter=DatadogLogFormatter()) + RFC3339_REGEX = r"^((?:(\d{4}-\d{2}-\d{2})T(\d{2}:\d{2}:\d{2}(?:\.\d+)?))(Z|[\+-]\d{2}:\d{2})?)$" + + # WHEN a log statement happens + logger.info({}) + + # THEN the timestamp uses RFC3339 by default + log = capture_logging_output(stdout) + + assert re.fullmatch(RFC3339_REGEX, log["timestamp"]) # "2022-10-27T17:42:26.841+0200" diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index c0c41f3bf88..964af99ce6e 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -249,6 +249,26 @@ def lambda_handler(evt, ctx): assert expected == output +def test_log_metrics_manual_flush(capsys, metrics, dimensions, namespace): + # GIVEN Metrics is initialized + my_metrics = Metrics(namespace=namespace) + for metric in metrics: + my_metrics.add_metric(**metric) + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + + # WHEN we manually the metrics + my_metrics.flush_metrics() + + output = capture_metrics_output(capsys) + expected = serialize_metrics(metrics=metrics, dimensions=dimensions, namespace=namespace) + + # THEN we should have no exceptions + # and a valid EMF object should be flushed correctly + remove_timestamp(metrics=[output, expected]) + assert expected == output + + def 
test_namespace_env_var(monkeypatch, capsys, metric, dimension, namespace): # GIVEN POWERTOOLS_METRICS_NAMESPACE is set monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", namespace) diff --git a/tests/unit/parser/test_s3.py b/tests/unit/parser/test_s3.py index 6d11ba8b9fd..6ae2656ddd2 100644 --- a/tests/unit/parser/test_s3.py +++ b/tests/unit/parser/test_s3.py @@ -1,7 +1,12 @@ +import json from datetime import datetime +import pytest + +from aws_lambda_powertools.utilities.parser import ValidationError from aws_lambda_powertools.utilities.parser.models import ( S3EventNotificationEventBridgeModel, + S3SqsEventNotificationModel, ) from tests.functional.utils import load_event @@ -105,3 +110,36 @@ def test_s3_eventbridge_notification_object_restore_completed_event(): assert model.detail.requester == raw_event["detail"]["requester"] assert model.detail.restore_expiry_time == raw_event["detail"]["restore-expiry-time"] assert model.detail.source_storage_class == raw_event["detail"]["source-storage-class"] + + +def test_s3_sqs_event_notification(): + raw_event = load_event("s3SqsEvent.json") + model = S3SqsEventNotificationModel(**raw_event) + + body = json.loads(raw_event["Records"][0]["body"]) + + assert model.Records[0].body.Records[0].eventVersion == body["Records"][0]["eventVersion"] + assert model.Records[0].body.Records[0].eventSource == body["Records"][0]["eventSource"] + assert model.Records[0].body.Records[0].eventTime == datetime.fromisoformat( + body["Records"][0]["eventTime"].replace("Z", "+00:00") + ) + assert model.Records[0].body.Records[0].eventName == body["Records"][0]["eventName"] + + +def test_s3_sqs_event_notification_body_invalid_json(): + raw_event = load_event("s3SqsEvent.json") + + for record in raw_event["Records"]: + record["body"] = "invalid body" + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event) + + +def test_s3_sqs_event_notification_body_containing_arbitrary_json(): + raw_event = 
load_event("s3SqsEvent.json") + for record in raw_event["Records"]: + record["body"] = {"foo": "bar"} + + with pytest.raises(ValidationError): + S3SqsEventNotificationModel(**raw_event)