From b00fd378bc023911e28e831f008abd8657c0f029 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 17 Nov 2021 15:51:56 +0100 Subject: [PATCH 01/36] chore(ci): split latest docs workflow --- .github/workflows/publish.yml | 23 +-------- .github/workflows/rebuild_latest_docs.yml | 62 +++++++++++++++++++++++ 2 files changed, 64 insertions(+), 21 deletions(-) create mode 100644 .github/workflows/rebuild_latest_docs.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 5a7c1a3110c..b0f8f4dd919 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -33,23 +33,12 @@ name: Publish to PyPi # # === Documentation hotfix === # -# 1. Trigger "Publish to PyPi" workflow manually: https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow -# 2. Use the latest version released under Releases e.g. v1.21.1 -# 3. Set `Build and publish docs only` field to `true` +# Look for rebuild latest docs workflow on: release: types: [published] - workflow_dispatch: - inputs: - publish_version: - description: 'Version to publish, e.g. v1.13.0' - required: true - publish_docs_only: - description: 'Build and publish docs only' - required: false - default: 'false' jobs: release: @@ -65,40 +54,33 @@ jobs: - name: Set release notes tag run: | RELEASE_TAG_VERSION=${{ github.event.release.tag_name }} - # Replace publishing version if the workflow was triggered manually - # test -n ${RELEASE_TAG_VERSION} && RELEASE_TAG_VERSION=${{ github.event.inputs.publish_version }} echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - name: Ensure new version is also set in pyproject and CHANGELOG - if: ${{ github.event.inputs.publish_docs_only == false }} run: | grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml - name: Install dependencies run: make dev - name: Run all tests, linting and baselines - if: ${{ github.event.inputs.publish_docs_only == false }} run: make pr - name: Build python package and wheel - if: ${{ github.event.inputs.publish_docs_only == false }} run: poetry build - name: Upload to PyPi test - if: ${{ github.event.inputs.publish_docs_only == false }} run: make release-test env: PYPI_USERNAME: __token__ PYPI_TEST_TOKEN: ${{ secrets.PYPI_TEST_TOKEN }} - name: Upload to PyPi prod - if: ${{ github.event.inputs.publish_docs_only == false }} run: make release-prod env: PYPI_USERNAME: __token__ PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - name: publish lambda layer in SAR by triggering the internal codepipeline - if: ${{ github.event.inputs.publish_docs_only == false }} run: | aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }} env: + # Maintenance: Migrate to new OAuth mechanism AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_DEFAULT_REGION: eu-west-1 @@ -129,7 +111,6 @@ jobs: sync_master: needs: release runs-on: ubuntu-latest - if: ${{ github.event.inputs.publish_docs_only == false }} steps: - uses: actions/checkout@v2 - name: Sync master from detached head diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml new file mode 100644 index 00000000000..8d05c1aadbc --- /dev/null +++ b/.github/workflows/rebuild_latest_docs.yml @@ -0,0 +1,62 @@ +name: Rebuild latest docs + +# +# === Documentation hotfix === +# +# 1. 
Trigger "Rebuild latest docs" workflow manually: https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow +# 2. Use the latest version released under Releases e.g. v1.22.0 +# 3. Set `Build and publish docs only` field to `true` + + +on: + workflow_dispatch: + inputs: + latest_published_version: + description: 'Latest PyPi published version to rebuild latest docs for, e.g. v1.22.0' + default: 'v1.22.0' + required: true + + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v2.2.2 + with: + python-version: "3.8" + - name: Set release notes tag + run: | + RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} + echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV + - name: Ensure new version is also set in pyproject and CHANGELOG + run: | + grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md + grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml + - name: Install dependencies + run: make dev + - name: Setup doc deploy + run: | + git config --global user.name Docs deploy + git config --global user.email aws-devax-open-source@amazon.com + - name: Build docs website and API reference + run: | + make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" + poetry run mike set-default --push latest + - name: Release API docs to release version + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api + - name: Release API docs to latest + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api From 6bd85c5791e7352de1c1cad0e9fd737f50f91e19 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 17 Nov 2021 15:55:27 +0100 Subject: [PATCH 02/36] docs: update Lambda Layers version --- docs/index.md | 56 +++++++++++++++++++++++++-------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/docs/index.md b/docs/index.md index 86b91635163..61abd260cb9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -23,7 +23,7 @@ This project separates core utilities that will be available in other runtimes v Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:3**](#){: .copyMe} :clipboard: +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:4**](#){: .copyMe} :clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** ### Lambda Layer @@ -36,23 +36,23 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN |--------------------------- | --------------------------- - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-northeast-1` | 
[arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:3](#){: .copyMe} :clipboard: + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:4](#){: .copyMe} :clipboard: === "SAM" @@ -61,7 +61,7 
@@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:3 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:4 ``` === "Serverless framework" @@ -71,7 +71,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:3 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:4 ``` === "CDK" @@ -90,7 +90,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: function_name='sample-lambda', code=aws_lambda.Code.asset('./src'), handler='app.handler', - layers: [f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:3"] + layers: [f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:4"] ) ``` @@ -133,7 +133,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:3"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:4"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -152,7 +152,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:3 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:4 ❯ amplify push -y @@ -163,14 +163,14 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:3 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:4 ? Do you want to edit the local lambda function now? No ``` === "Get the Layer .zip contents" Change {region} to your AWS region, e.g. `eu-west-1` - **`aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:3 --region {region}`** + **`aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:4 --region {region}`** !!! 
warning "Limitations" @@ -205,7 +205,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Properties: Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - SemanticVersion: 1.21.1 # change to latest semantic version available in SAR + SemanticVersion: 1.22.0 # change to latest semantic version available in SAR MyLambdaFunction: Type: AWS::Serverless::Function @@ -233,7 +233,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - SemanticVersion: 1.21.1 + SemanticVersion: 1.22.0 ``` === "CDK" @@ -243,7 +243,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, POWERTOOLS_BASE_NAME = 'AWSLambdaPowertools' # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - POWERTOOLS_VER = '1.21.1' + POWERTOOLS_VER = '1.22.0' POWERTOOLS_ARN = 'arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer' class SampleApp(core.Construct): From 5c5a72ec29c2f6be63be73c737d17c8fe822b523 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 17 Nov 2021 16:08:46 +0100 Subject: [PATCH 03/36] docs(appsync): fix users.py typo to locations #830 --- docs/core/event_handler/appsync.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index ce9150113b6..7cf99fa8ce2 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -717,7 +717,7 @@ You can subclass `AppSyncResolverEvent` to bring your own set of methods to hand As you grow the number of related GraphQL operations a given Lambda function should handle, it is natural to split them into separate files to ease maintenance - That's where the `Router` feature is useful. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `users.py`, this is how you'd use the `Router` feature. +Let's assume you have `app.py` as your Lambda function entrypoint and routes in `location.py`, this is how you'd use the `Router` feature. === "resolvers/location.py" @@ -746,7 +746,7 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in === "app.py" - We use `include_router` method and include all location operations registered in the `router` global object. + We use `include_router` method and include all `location` operations registered in the `router` global object. 
```python hl_lines="8 13" from typing import Dict From 566043accc2d9e69a3a955dffb749687f672cef6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 18 Nov 2021 19:11:41 +0100 Subject: [PATCH 04/36] docs: disable search blur in non-prod env --- docs/javascript/extra.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/javascript/extra.js b/docs/javascript/extra.js index f886d1caba3..477c0615a75 100644 --- a/docs/javascript/extra.js +++ b/docs/javascript/extra.js @@ -10,6 +10,7 @@ const awsconfig = { }; const RUNTIME = "python" +const BASE_ORIGIN = "awslabs.github.io" function copyToClipboard(e) { e.preventDefault() @@ -18,6 +19,7 @@ function copyToClipboard(e) { } function enableSearchOnBlurElement() { + if (document.location.hostname != BASE_ORIGIN) return // prevent unnecessary data /* Register handler to log search on blur */ document.addEventListener("DOMContentLoaded", function () { recordPageView({ From fe2cf13ace5085f6402c827fab0b699bdf4552fc Mon Sep 17 00:00:00 2001 From: Tom McCarthy Date: Fri, 19 Nov 2021 18:13:42 +0100 Subject: [PATCH 05/36] fix(apigateway): allow list of HTTP methods in route method (#838) --- .../event_handler/api_gateway.py | 33 ++--- docs/core/event_handler/api_gateway.md | 119 +++++++++++++----- .../event_handler/test_api_gateway.py | 36 ++++++ 3 files changed, 145 insertions(+), 43 deletions(-) diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index dce520c147d..d950bdc9c52 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -10,7 +10,7 @@ from enum import Enum from functools import partial from http import HTTPStatus -from typing import Any, Callable, Dict, List, Optional, Set, Union +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.exceptions import ServiceError @@ -453,7 +453,7 @@ def __init__( def route( self, rule: str, - method: str, + method: Union[str, Union[List[str], Tuple[str]]], cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None, @@ -461,19 +461,22 @@ def route( """Route decorator includes parameter `method`""" def register_resolver(func: Callable): - logger.debug(f"Adding route using rule {rule} and method {method.upper()}") + methods = (method,) if isinstance(method, str) else method + logger.debug(f"Adding route using rule {rule} and methods: {','.join((m.upper() for m in methods))}") if cors is None: cors_enabled = self._cors_enabled else: cors_enabled = cors - self._routes.append(Route(method, self._compile_regex(rule), func, cors_enabled, compress, cache_control)) - route_key = method + rule - if route_key in self._route_keys: - warnings.warn(f"A route like this was already registered. method: '{method}' rule: '{rule}'") - self._route_keys.append(route_key) - if cors_enabled: - logger.debug(f"Registering method {method.upper()} to Allow Methods in CORS") - self._cors_methods.add(method.upper()) + + for item in methods: + self._routes.append(Route(item, self._compile_regex(rule), func, cors_enabled, compress, cache_control)) + route_key = item + rule + if route_key in self._route_keys: + warnings.warn(f"A route like this was already registered. 
method: '{item}' rule: '{rule}'")
+            self._route_keys.append(route_key)
+            if cors_enabled:
+                logger.debug(f"Registering method {item.upper()} to Allow Methods in CORS")
+                self._cors_methods.add(item.upper())
             return func

         return register_resolver
@@ -679,14 +682,14 @@ def __init__(self):
     def route(
         self,
         rule: str,
-        method: Union[str, List[str]],
+        method: Union[str, Union[List[str], Tuple[str]]],
         cors: Optional[bool] = None,
         compress: bool = False,
         cache_control: Optional[str] = None,
     ):
         def register_route(func: Callable):
-            methods = method if isinstance(method, list) else [method]
-            for item in methods:
-                self._routes[(rule, item, cors, compress, cache_control)] = func
+            # Convert methods to tuple. It needs to be hashable as it's part of the self._routes dict key
+            methods = (method,) if isinstance(method, str) else tuple(method)
+            self._routes[(rule, methods, cors, compress, cache_control)] = func

         return register_route
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index f9482edaacf..8c0d5e6621e 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -42,45 +42,27 @@ This is the sample infrastructure for API Gateway we are using for the examples
           Timeout: 5
           Runtime: python3.8
           Tracing: Active
-          Environment: 
+          Environment:
             Variables:
               LOG_LEVEL: INFO
               POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1
               POWERTOOLS_LOGGER_LOG_EVENT: true
               POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication
-              POWERTOOLS_SERVICE_NAME: hello
+              POWERTOOLS_SERVICE_NAME: my_api-service

       Resources:
-        HelloWorldFunction:
+        ApiFunction:
           Type: AWS::Serverless::Function
           Properties:
             Handler: app.lambda_handler
-            CodeUri: hello_world
-            Description: Hello World function
+            CodeUri: api_handler/
+            Description: API handler function
             Events:
-              HelloUniverse:
-                Type: Api
-                Properties:
-                  Path: /hello
-                  Method: GET
-              HelloYou:
-                Type: Api
-                Properties:
-                  Path: /hello/{name} # see Dynamic routes section
-                  Method: GET
-              CustomMessage:
-                Type: Api
-                Properties:
-                  Path: /{message}/{name} # see Dynamic routes section
-                  Method: GET
-
-      Outputs:
-        HelloWorldApigwURL:
-          Description: "API Gateway endpoint URL for Prod environment for Hello World Function"
-          Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello"
-        HelloWorldFunction:
-          Description: "Hello World Lambda Function ARN"
-          Value: !GetAtt HelloWorldFunction.Arn
+              ApiEvent:
+                Type: Api
+                Properties:
+                  Path: /{proxy+} # Send requests on any path to the lambda function
+                  Method: ANY # Send requests using any http method to the lambda function
     ```

 ### API Gateway decorator
@@ -360,6 +342,87 @@ You can also combine nested paths with greedy regex to catch in between routes.
         ...
     }
     ```
+### HTTP Methods
+You can use named decorators to specify the HTTP method that should be handled in your functions. As well as the
+`get` method already shown above, you can use `post`, `put`, `patch`, and `delete`.
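+
+Under the hood, the named decorators delegate to `route` with the corresponding HTTP verb. A minimal sketch of the equivalence — `app` is assumed to be the `ApiGatewayResolver` instance used in the examples below, and the handler name is illustrative:
+
+```python
+@app.post("/hello")  # shorthand for the explicit form noted below
+def create_hello():
+    return {"message": "hello"}
+
+# equivalent registration: @app.route("/hello", method="POST")
+# registering both forms for the same rule would trigger the duplicate-route warning
+```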
+
+=== "app.py"
+
+    ```python hl_lines="9-10"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()
+
+    # Only POST HTTP requests to the path /hello will route to this function
+    @app.post("/hello")
+    @tracer.capture_method
+    def get_hello_you():
+        name = app.current_event.json_body.get("name")
+        return {"message": f"hello {name}"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "sample_request.json"
+
+    ```json
+    {
+        "resource": "/hello",
+        "path": "/hello",
+        "httpMethod": "POST",
+        ...
+    }
+    ```
+
+If you need to accept multiple HTTP methods in a single function, you can use the `route` method and pass a list of
+HTTP methods.
+
+=== "app.py"
+
+    ```python hl_lines="9-10"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()
+
+    # PUT and POST HTTP requests to the path /hello will route to this function
+    @app.route("/hello", method=["PUT", "POST"])
+    @tracer.capture_method
+    def get_hello_you():
+        name = app.current_event.json_body.get("name")
+        return {"message": f"hello {name}"}
+
+    # You can continue to use other utilities just as before
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "sample_request.json"
+
+    ```json
+    {
+        "resource": "/hello",
+        "path": "/hello",
+        "httpMethod": "PUT",
+        ...
+    }
+    ```
+
+!!! note "It is usually better to have separate functions for each HTTP method, as the functionality tends to differ depending on which method is used."
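+
+If one function does serve several verbs, you can branch on the incoming verb via `app.current_event.http_method`. A minimal sketch, assuming the multi-method route above (the response bodies are illustrative):
+
+```python
+@app.route("/hello", method=["PUT", "POST"])
+def hello_you():
+    # current_event exposes the raw proxy event; http_method holds the HTTP verb
+    if app.current_event.http_method == "POST":
+        return {"message": "created"}
+    return {"message": "updated"}
+```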
### Accessing request details diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index f4543fa300c..09594789ac3 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -1021,3 +1021,39 @@ def get_func_another_duplicate(): # THEN only execute the first registered route # AND print warnings assert result["statusCode"] == 200 + + +def test_route_multiple_methods(): + # GIVEN a function with http methods passed as a list + app = ApiGatewayResolver() + req = "foo" + get_event = deepcopy(LOAD_GW_EVENT) + get_event["resource"] = "/accounts/{account_id}" + get_event["path"] = f"/accounts/{req}" + + post_event = deepcopy(get_event) + post_event["httpMethod"] = "POST" + + put_event = deepcopy(get_event) + put_event["httpMethod"] = "PUT" + + lambda_context = {} + + @app.route(rule="/accounts/", method=["GET", "POST"]) + def foo(account_id): + assert app.lambda_context == lambda_context + assert account_id == f"{req}" + return {} + + # WHEN calling the event handler with the supplied methods + get_result = app(get_event, lambda_context) + post_result = app(post_event, lambda_context) + put_result = app(put_event, lambda_context) + + # THEN events are processed correctly + assert get_result["statusCode"] == 200 + assert get_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert post_result["statusCode"] == 200 + assert post_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert put_result["statusCode"] == 404 + assert put_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON From be6e722e3d552c56ef916c87dffeb40e394fe499 Mon Sep 17 00:00:00 2001 From: Tom McCarthy Date: Thu, 25 Nov 2021 03:42:25 +0100 Subject: [PATCH 06/36] feat(apigateway): access parent api resolver from router (#842) --- .../event_handler/api_gateway.py | 5 +++++ .../event_handler/test_api_gateway.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index d950bdc9c52..b3d77df24b4 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -664,6 +664,10 @@ def include_router(self, router: "Router", prefix: Optional[str] = None) -> None prefix : str, optional An optional prefix to be added to the originally defined rule """ + + # Add reference to parent ApiGatewayResolver to support use cases where people subclass it to add custom logic + router.api_resolver = self + for route, func in router._routes.items(): if prefix: rule = route[0] @@ -678,6 +682,7 @@ class Router(BaseRouter): def __init__(self): self._routes: Dict[tuple, Callable] = {} + self.api_resolver: Optional[BaseRouter] = None def route( self, diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 09594789ac3..f28752e6de6 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -1057,3 +1057,22 @@ def foo(account_id): assert post_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON assert put_result["statusCode"] == 404 assert put_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + + +def test_api_gateway_app_router_access_to_resolver(): + # GIVEN a Router with registered routes + app = ApiGatewayResolver() + router = Router() + + 
@router.get("/my/path") + def foo(): + # WHEN accessing the api resolver instance via the router + # THEN it is accessible and equal to the instantiated api resolver + assert app == router.api_resolver + return {} + + app.include_router(router) + result = app(LOAD_GW_EVENT, {}) + + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON From a5e9dda604d380cbf47c1e037d3b9a1241fe0351 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Nov 2021 09:01:25 +0100 Subject: [PATCH 07/36] docs(lambda_layer): fix CDK layer syntax --- docs/index.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/index.md b/docs/index.md index 61abd260cb9..03d89725b7b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -76,7 +76,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: === "CDK" - ```python hl_lines="14" + ```python hl_lines="11 16" from aws_cdk import core, aws_lambda class SampleApp(core.Construct): @@ -84,13 +84,16 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: super().__init__(scope, id_) + powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( + self, + id="lambda-powertools", + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:4" + ) aws_lambda.Function(self, 'sample-app-lambda', runtime=aws_lambda.Runtime.PYTHON_3_9, - function_name='sample-lambda', - code=aws_lambda.Code.asset('./src'), - handler='app.handler', - layers: [f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:4"] + layers=[powertools_layer] + # other props... ) ``` From cf1c1935b017e0f05b7eee791a532f6f09794918 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 15:58:56 +0000 Subject: [PATCH 08/36] chore(deps): bump actions/setup-python from 2.2.2 to 2.3.0 (#831) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 2.2.2 to 2.3.0.
Release notes

Sourced from actions/setup-python's releases.

Support caching dependencies

This release introduces dependency caching support (actions/setup-python#266)

Caching dependencies.

The action has a built-in functionality for caching and restoring pip/pipenv dependencies. The cache input is optional, and caching is turned off by default.

Besides, this release introduces dependency caching support for mono repos and repositories with complex structure.

By default, the action searches for the dependency file (requirements.txt for pip or Pipfile.lock for pipenv) in the whole repository. Use the cache-dependency-path input for cases when you want to override current behaviour and use different file for hash generation (for example requirements-dev.txt). This input supports wildcards or a list of file names for caching multiple dependencies.

Caching pip dependencies:

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: '3.9'
    cache: 'pip'
- run: pip install -r requirements.txt
- run: pip test

Caching pipenv dependencies:

steps:
- uses: actions/checkout@v2
- name: Install pipenv
  run: pipx install pipenv
- uses: actions/setup-python@v2
  with:
    python-version: '3.9'
    cache: 'pipenv'
- run: pipenv install
- run: pipenv test

Change dependency file:

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: '3.9'
    cache: 'pip'
    cache-dependency-path: '**/requirements-dev.txt'
- run: pip install -r subdirectory/requirements-dev.txt
- run: pip test
--- .github/workflows/publish.yml | 2 +- .github/workflows/python_build.yml | 2 +- .github/workflows/python_docs.yml | 2 +- .github/workflows/rebuild_latest_docs.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b0f8f4dd919..41dedc61ea3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -48,7 +48,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.2.2 + uses: actions/setup-python@v2.3.0 with: python-version: "3.8" - name: Set release notes tag diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 6dc4446ee14..4b603024115 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2.2.2 + uses: actions/setup-python@v2.3.0 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index 219b9381a8a..ea86e239c53 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -17,7 +17,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.2.2 + uses: actions/setup-python@v2.3.0 with: python-version: "3.8" - name: Install dependencies diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index 8d05c1aadbc..d089b0ce4c3 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -25,7 +25,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.2.2 + uses: actions/setup-python@v2.3.0 with: python-version: "3.8" - name: Set release notes tag From c3f6298afb4287ee0fd1886dad2574ab35c391c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 15:59:05 +0000 Subject: [PATCH 09/36] chore(deps-dev): bump black from 21.10b0 to 21.11b1 (#839) Bumps [black](https://github.com/psf/black) from 21.10b0 to 21.11b1.
Release notes

Sourced from black's releases.

21.11b1

Black

  • Bumped regex version minimum to 2021.4.4 to fix Pattern class usage (#2621)

21.11b0

Black

  • Warn about Python 2 deprecation in more cases by improving Python 2 only syntax detection (#2592)
  • Add experimental PyPy support (#2559)
  • Add partial support for the match statement. As it's experimental, it's only enabled when --target-version py310 is explicitly specified (#2586)
  • Add support for parenthesized with (#2586)
  • Declare support for Python 3.10 for running Black (#2562)

Integrations

  • Fixed vim plugin with Python 3.10 by removing deprecated distutils import (#2610)
  • The vim plugin now parses skip_magic_trailing_comma from pyproject.toml (#2613)
--- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index fea9831cd5f..d1759b65bcd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -50,7 +50,7 @@ stevedore = ">=1.20.0" [[package]] name = "black" -version = "21.10b0" +version = "21.11b1" description = "The uncompromising code formatter." category = "dev" optional = false @@ -62,9 +62,9 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0,<1" platformdirs = ">=2" -regex = ">=2020.1.8" +regex = ">=2021.4.4" tomli = ">=0.2.6,<2.0.0" -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, @@ -1065,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "2873198da6ba0fc9487a838f4bb5e3f7c7d35fa31cf7a6a412733927cfed5c5f" +content-hash = "c9a93d64439abc479d4c7369922138f6d63d6434aed10c3e871a7064d7278634" [metadata.files] atomicwrites = [ @@ -1085,8 +1085,8 @@ bandit = [ {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, ] black = [ - {file = "black-21.10b0-py3-none-any.whl", hash = "sha256:6eb7448da9143ee65b856a5f3676b7dda98ad9abe0f87fce8c59291f15e82a5b"}, - {file = "black-21.10b0.tar.gz", hash = "sha256:a9952229092e325fe5f3dae56d81f639b23f7131eb840781947e4b2886030f33"}, + {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, + {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, ] boto3 = [ {file = "boto3-1.20.5-py3-none-any.whl", hash = "sha256:81ca80fbb3d551819c35c809cb159fd0bec6701d3d8f0e5906a22da7558d098e"}, diff --git a/pyproject.toml b/pyproject.toml index cea8a4abbdc..13d389aa87a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ email-validator = {version = "*", optional = true } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^6.1"} pytest = "^6.2.5" -black = "^21.10.b0" +black = "^21.11b1" flake8 = "^3.9.0" flake8-black = "^0.2.3" flake8-builtins = "^1.5.3" From c1eb9280441c5fb95d330766725fb0cf1df33704 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 30 Nov 2021 11:56:55 +0100 Subject: [PATCH 10/36] fix(tracer): add warm start annotation (ColdStart=False) (#851) --- aws_lambda_powertools/tracing/tracer.py | 5 +++-- tests/unit/test_tracing.py | 29 ++++++++++++++++++++----- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 2626793304c..2beab0483be 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -304,9 +304,10 @@ def handler(event, context): def decorate(event, context, **kwargs): with self.provider.in_subsegment(name=f"## {lambda_handler_name}") as subsegment: global is_cold_start + logger.debug("Annotating cold start") + subsegment.put_annotation(key="ColdStart", value=is_cold_start) + if is_cold_start: - logger.debug("Annotating cold start") - subsegment.put_annotation(key="ColdStart", value=True) is_cold_start = False try: diff --git 
a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index 2b147ec4405..7c8b6244f01 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -51,7 +51,9 @@ def patch(self, *args, **kwargs): def reset_tracing_config(mocker): Tracer._reset_config() # reset global cold start module - mocker.patch("aws_lambda_powertools.tracing.tracer.is_cold_start", return_value=True) + mocker.patch( + "aws_lambda_powertools.tracing.tracer.is_cold_start", new_callable=mocker.PropertyMock(return_value=True) + ) yield @@ -79,7 +81,7 @@ class InSubsegment(NamedTuple): yield in_subsegment -def test_tracer_lambda_handler(mocker, dummy_response, provider_stub, in_subsegment_mock): +def test_tracer_lambda_handler_subsegment(mocker, dummy_response, provider_stub, in_subsegment_mock): # GIVEN Tracer is initialized with booking as the service name provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) tracer = Tracer(provider=provider, service="booking") @@ -92,15 +94,13 @@ def handler(event, context): handler({}, mocker.MagicMock()) # THEN we should have a subsegment named handler - # annotate cold start, and add its response as trace metadata + # add its response as trace metadata # and use service name as a metadata namespace assert in_subsegment_mock.in_subsegment.call_count == 1 assert in_subsegment_mock.in_subsegment.call_args == mocker.call(name="## handler") assert in_subsegment_mock.put_metadata.call_args == mocker.call( key="handler response", value=dummy_response, namespace="booking" ) - assert in_subsegment_mock.put_annotation.call_count == 1 - assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=True) def test_tracer_method(mocker, dummy_response, provider_stub, in_subsegment_mock): @@ -571,3 +571,22 @@ def greeting(name, message): # THEN we should not add any metadata assert in_subsegment_mock.put_metadata.call_count == 0 + + +def test_tracer_lambda_handler_cold_start(mocker, dummy_response, provider_stub, in_subsegment_mock): + # GIVEN + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, service="booking") + + # WHEN + @tracer.capture_lambda_handler + def handler(event, context): + return dummy_response + + handler({}, mocker.MagicMock()) + + # THEN + assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=True) + + handler({}, mocker.MagicMock()) + assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=False) From 4100d19f5222568fe6f09f7fd67f2d1616e063bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Nov 2021 11:02:42 +0000 Subject: [PATCH 11/36] chore(deps): bump actions/setup-python from 2.3.0 to 2.3.1 (#852) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/setup-python](https://github.com/actions/setup-python) from 2.3.0 to 2.3.1.
Release notes

Sourced from actions/setup-python's releases.

Update actions/cache version to 1.0.8

We have updated actions/cache dependency version to 1.0.8 to support 10GB cache upload

Commits
  • f382193 Update @​actions/cache version to 1.0.8 (#283)
  • 3ef38b8 Create ADR for integrating cache functionality to setup-python action (#247)
--- .github/workflows/publish.yml | 2 +- .github/workflows/python_build.yml | 2 +- .github/workflows/python_docs.yml | 2 +- .github/workflows/rebuild_latest_docs.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 41dedc61ea3..1a8f26db879 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -48,7 +48,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.3.0 + uses: actions/setup-python@v2.3.1 with: python-version: "3.8" - name: Set release notes tag diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 4b603024115..17aa08ead81 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2.3.0 + uses: actions/setup-python@v2.3.1 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index ea86e239c53..bb1426534fb 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -17,7 +17,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.3.0 + uses: actions/setup-python@v2.3.1 with: python-version: "3.8" - name: Install dependencies diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index d089b0ce4c3..606d1c55696 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -25,7 +25,7 @@ jobs: with: fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v2.3.0 + uses: actions/setup-python@v2.3.1 with: python-version: "3.8" - name: Set release notes tag From 1ad18c9aeadcc9692dca4bcf1f6e1b152ac58768 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Nov 2021 11:02:48 +0000 Subject: [PATCH 12/36] chore(deps-dev): bump flake8 from 3.9.2 to 4.0.1 (#789) Bumps [flake8](https://github.com/pycqa/flake8) from 3.9.2 to 4.0.1.
Commits
  • 82b698e Release 4.0.1
  • 0fac346 Merge pull request #1410 from PyCQA/parallel-syntax-error
  • aa54693 fix parallel execution collecting a SyntaxError
  • d31c535 Release 4.0.0
  • afd2399 Merge pull request #1407 from asottile/setup-cfg-fmt
  • 960cf8c rerun setup-cfg-fmt (and restore comments)
  • d7baba5 Merge pull request #1406 from asottile/update-versions
  • d79021a update dependency versions
  • 283f0c8 Merge pull request #1404 from PyCQA/drop-xdg-config
  • 807904a Drop support for Home and XDG config files
--- poetry.lock | 30 +++++++++++++++--------------- pyproject.toml | 2 +- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/poetry.lock b/poetry.lock index d1759b65bcd..541b3f3e0ee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -215,17 +215,17 @@ devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benc [[package]] name = "flake8" -version = "3.9.2" +version = "4.0.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" [[package]] name = "flake8-black" @@ -701,11 +701,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycodestyle" -version = "2.7.0" +version = "2.8.0" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydantic" @@ -725,7 +725,7 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "2.3.1" +version = "2.4.0" description = "passive checker of Python programs" category = "dev" optional = false @@ -1065,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "c9a93d64439abc479d4c7369922138f6d63d6434aed10c3e871a7064d7278634" +content-hash = "18b7eb5ba70ed322228d5e0b09e375c60ede5ff94c77c618f2ed0414f2e0b58d" [metadata.files] atomicwrites = [ @@ -1181,8 +1181,8 @@ fastjsonschema = [ {file = "fastjsonschema-2.15.1.tar.gz", hash = "sha256:671f36d225b3493629b5e789428660109528f373cf4b8a22bac6fa2f8191c2d2"}, ] flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] flake8-black = [ {file = "flake8-black-0.2.3.tar.gz", hash = "sha256:c199844bc1b559d91195ebe8620216f21ed67f2cc1ff6884294c91a0d2492684"}, @@ -1426,8 +1426,8 @@ py = [ {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pydantic = [ {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, @@ -1454,8 +1454,8 @@ 
pydantic = [ {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, ] pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, diff --git a/pyproject.toml b/pyproject.toml index 13d389aa87a..f999daf4d28 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ email-validator = {version = "*", optional = true } coverage = {extras = ["toml"], version = "^6.1"} pytest = "^6.2.5" black = "^21.11b1" -flake8 = "^3.9.0" +flake8 = "^4.0.1" flake8-black = "^0.2.3" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" From e4a1b9d030e1c617307a7256f1a174f29cc3aa81 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 30 Nov 2021 16:16:23 +0100 Subject: [PATCH 13/36] feat(tracer): add service annotation when service is set (#861) --- Makefile | 3 ++ aws_lambda_powertools/tracing/tracer.py | 46 +++++++++++++++++++------ docs/core/tracer.md | 1 + tests/unit/test_tracing.py | 42 ++++++++++++++++++++-- 4 files changed, 80 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index 6b9d6ef0963..5b8e9b0d689 100644 --- a/Makefile +++ b/Makefile @@ -20,6 +20,9 @@ test: poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance +unit-test: + poetry run pytest tests/unit + coverage-html: poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=html diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 2beab0483be..70580663e7b 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -17,7 +17,6 @@ logger = logging.getLogger(__name__) aws_xray_sdk = LazyLoader(constants.XRAY_SDK_MODULE, globals(), constants.XRAY_SDK_MODULE) -aws_xray_sdk.core = LazyLoader(constants.XRAY_SDK_CORE_MODULE, globals(), constants.XRAY_SDK_CORE_MODULE) # type: ignore # noqa: E501 class Tracer: @@ -137,7 +136,7 @@ def handler(event: dict, context: Any) -> Dict: """ _default_config: Dict[str, Any] = { - "service": "service_undefined", + "service": "", "disabled": False, "auto_patch": True, "patch_modules": None, @@ -156,7 +155,7 @@ def __init__( self.__build_config( service=service, disabled=disabled, auto_patch=auto_patch, patch_modules=patch_modules, provider=provider ) - self.provider: BaseProvider = self._config["provider"] + self.provider = self._config["provider"] self.disabled = self._config["disabled"] self.service = self._config["service"] self.auto_patch = self._config["auto_patch"] @@ -167,10 +166,8 @@ def __init__( if self.auto_patch: self.patch(modules=patch_modules) - # Set the streaming threshold to 0 on the default recorder to force sending - # subsegments individually, rather than batching them. 
- # See https://github.com/awslabs/aws-lambda-powertools-python/issues/283 - aws_xray_sdk.core.xray_recorder.configure(streaming_threshold=0) # noqa: E800 + if self._is_xray_provider(): + self._disable_xray_trace_batching() def put_annotation(self, key: str, value: Union[str, numbers.Number, bool]): """Adds annotation to existing segment or subsegment @@ -239,9 +236,9 @@ def patch(self, modules: Optional[Sequence[str]] = None): return if modules is None: - aws_xray_sdk.core.patch_all() + self.provider.patch_all() else: - aws_xray_sdk.core.patch(modules) + self.provider.patch(modules) def capture_lambda_handler( self, @@ -310,6 +307,9 @@ def decorate(event, context, **kwargs): if is_cold_start: is_cold_start = False + if self.service: + subsegment.put_annotation(key="Service", value=self.service) + try: logger.debug("Calling lambda handler") response = lambda_handler(event, context, **kwargs) @@ -743,7 +743,8 @@ def __build_config( is_disabled = disabled if disabled is not None else self._is_tracer_disabled() is_service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV)) - self._config["provider"] = provider or self._config["provider"] or aws_xray_sdk.core.xray_recorder + # Logic: choose an explicit override first, then the previously cached config, then the default provider + self._config["provider"] = provider or self._config["provider"] or self._patch_xray_provider() self._config["auto_patch"] = auto_patch if auto_patch is not None else self._config["auto_patch"] self._config["service"] = is_service or self._config["service"] self._config["disabled"] = is_disabled or self._config["disabled"] @@ -752,3 +753,28 @@ @classmethod def _reset_config(cls): cls._config = copy.copy(cls._default_config) + + def _patch_xray_provider(self): + # Due to lazy loading, we need to activate the `core` attribute via import; + # we also attach the `patch` and `patch_all` methods + # to ensure patch calls are done via the provider + from aws_xray_sdk.core import xray_recorder + + provider = xray_recorder + provider.patch = aws_xray_sdk.core.patch + provider.patch_all = aws_xray_sdk.core.patch_all + + return provider + + def _disable_xray_trace_batching(self): + """Configure the X-Ray SDK to send subsegments individually instead of batching them Known issue: https://github.com/awslabs/aws-lambda-powertools-python/issues/283 """ + if self.disabled: + logger.debug("Tracing has been disabled, aborting streaming override") + return + + aws_xray_sdk.core.xray_recorder.configure(streaming_threshold=0) + + def _is_xray_provider(self): + return "aws_xray_sdk" in self.provider.__module__ diff --git a/docs/core/tracer.md b/docs/core/tracer.md index e2e2df52e18..9e94d2549d9 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -58,6 +58,7 @@ You can quickly start by importing the `Tracer` class, initialize it outside the When using this `capture_lambda_handler` decorator, Tracer performs these additional tasks to ease operations: * Creates a `ColdStart` annotation to easily filter traces that have had an initialization overhead +* Creates a `Service` annotation if the `service` parameter or the `POWERTOOLS_SERVICE_NAME` environment variable is set * Captures any response, or full exceptions generated by the handler, and includes them as tracing metadata ### Annotations & Metadata
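To make the new docs bullet concrete: once a service name is set, every traced invocation carries a filterable `Service` annotation next to `ColdStart`. A minimal sketch of the end-user experience — the `booking` service name and handler body are illustrative only, not part of this patch:

```python
from aws_lambda_powertools import Tracer

# Setting POWERTOOLS_SERVICE_NAME=booking in the environment would have the
# same effect as the explicit argument; both feed the __build_config() shown above.
tracer = Tracer(service="booking")  # illustrative service name

@tracer.capture_lambda_handler
def handler(event, context):
    # With `service` set, the decorator adds put_annotation(key="Service", value="booking")
    # to the handler subsegment, alongside the existing ColdStart annotation.
    return {"statusCode": 200}
```

In the X-Ray console, traces can then be narrowed with a filter expression such as `annotation.Service = "booking"`.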
diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index 7c8b6244f01..55273b072c6 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -44,6 +44,9 @@ def in_subsegment(self, *args, **kwargs): def patch(self, *args, **kwargs): return self.patch_mock(*args, **kwargs) + def patch_all(self): + ... + return CustomProvider @@ -586,7 +589,42 @@ def handler(event, context): handler({}, mocker.MagicMock()) # THEN - assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=True) + assert in_subsegment_mock.put_annotation.call_args_list[0] == mocker.call(key="ColdStart", value=True) + + handler({}, mocker.MagicMock()) + assert in_subsegment_mock.put_annotation.call_args_list[2] == mocker.call(key="ColdStart", value=False) + + +def test_tracer_lambda_handler_add_service_annotation(mocker, dummy_response, provider_stub, in_subsegment_mock): + # GIVEN + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, service="booking") + + # WHEN + @tracer.capture_lambda_handler + def handler(event, context): + return dummy_response handler({}, mocker.MagicMock()) - assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=False) + + # THEN + assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="Service", value="booking") + + +def test_tracer_lambda_handler_do_not_add_service_annotation_when_missing( + mocker, dummy_response, provider_stub, in_subsegment_mock +): + # GIVEN + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider) + + # WHEN + @tracer.capture_lambda_handler + def handler(event, context): + return dummy_response + + handler({}, mocker.MagicMock()) + + # THEN + assert in_subsegment_mock.put_annotation.call_count == 1 + assert in_subsegment_mock.put_annotation.call_args == mocker.call(key="ColdStart", value=True)
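A side note on the provider indirection exercised by the tests above: because `Tracer.patch()` now calls `self.provider.patch()` / `self.provider.patch_all()` instead of `aws_xray_sdk.core` directly, a custom provider can observe or suppress module patching — exactly what the `CustomProvider` stub relies on. A hypothetical sketch, assuming a fresh process so Tracer's class-level config cache is empty; the `RecordingProvider` name and `boto3` module are illustrative:

```python
from aws_lambda_powertools import Tracer

class RecordingProvider:
    # Hypothetical stand-in for a tracing provider; only the methods
    # touched during Tracer initialization are implemented.

    def __init__(self):
        self.patched = []

    def patch(self, modules):
        # Invoked by Tracer.patch() when explicit modules are given
        self.patched.extend(modules)

    def patch_all(self):
        # Invoked by Tracer.patch() when no modules are given
        self.patched.append("*")

    def in_subsegment(self, *args, **kwargs):
        ...  # entry point used when a handler actually runs; unused here

tracer = Tracer(provider=RecordingProvider(), patch_modules=["boto3"])
assert tracer.provider.patched == ["boto3"]  # patch call went through the provider
```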
From 8cfa7737139286a061979eec0c1a4c12e720a107 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 30 Nov 2021 17:03:49 +0100 Subject: [PATCH 14/36] docs: add new public Slack invite --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c4778595366..d7796e27af5 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ With [pip](https://pip.pypa.io/en/latest/index.html) installed, run: ``pip insta ## Connect -* **AWS Developers Slack**: `#lambda-powertools`** - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-gu30gquv-EhwIYq3kHhhysaZ2aIX7ew)** +* **AWS Developers Slack**: `#lambda-powertools` - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-yryddays-C9fkWrmguDv0h2EEDzCqvw)** * **Email**: aws-lambda-powertools-feedback@amazon.com ## License From fcfa2e42bdc540939dd13427a656f8d36d6d1444 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Fri, 3 Dec 2021 00:07:34 -0800 Subject: [PATCH 15/36] docs(apigateway): fix sample layout provided (#864) --- docs/core/event_handler/api_gateway.md | 69 +++++++++++--------------- 1 file changed, 30 insertions(+), 39 deletions(-) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 8c0d5e6621e..1f5fa4479c0 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -1027,43 +1027,42 @@ When necessary, you can set a prefix when including a router object. This means #### Sample layout -!!! info "We use ALB to demonstrate that the UX remains the same" -This sample project contains an Users function with two distinct set of routes, `/users` and `/health`. The layout optimizes for code sharing, no custom build tooling, and it uses [Lambda Layers](../../index.md#lambda-layer) to install Lambda Powertools. +This sample project contains a Users function with two distinct sets of routes, `/users` and `/health`. The layout optimizes for code sharing, no custom build tooling, and it uses [Lambda Layers](../../index.md#lambda-layer) to install Lambda Powertools. === "Project layout" ```python hl_lines="1 8 10 12-15" . ├── Pipfile # project app & dev dependencies; poetry, pipenv, etc. ├── Pipfile.lock ├── README.md ├── src │ ├── __init__.py │ ├── requirements.txt # sam build detects it automatically due to CodeUri: src, e.g. pipenv lock -r > src/requirements.txt │ └── users │ ├── __init__.py │ ├── main.py # this will be our users Lambda fn; it could be split in folders if we want separate fns same code base │ └── routers # routers module │ ├── __init__.py │ ├── health.py # /health routes, e.g. from .routers import health; health.router │ └── users.py # /users routes, e.g.
from .routers import users; users.router + ├── template.yml # SAM template.yml, CodeUri: src, Handler: users.main.lambda_handler └── tests ├── __init__.py ├── unit │ ├── __init__.py - │ └── test_users.py # unit tests for the users router - │ └── test_health.py # unit tests for the health router + │ └── test_users.py # unit tests for the users router + │ └── test_health.py # unit tests for the health router └── functional ├── __init__.py - ├── conftest.py # pytest fixtures for the functional tests - └── test_lambda_function.py # functional tests for the main lambda handler + ├── conftest.py # pytest fixtures for the functional tests + └── test_main.py # functional tests for the main lambda handler ``` === "template.yml" - ```yaml hl_lines="20-21" + ```yaml hl_lines="22-23" AWSTemplateFormatVersion: '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: Example service with multiple routes @@ -1073,6 +1072,8 @@ This sample project contains an Users function with two distinct set of routes, MemorySize: 512 Runtime: python3.9 Tracing: Active + Architectures: + - x86_64 Environment: Variables: LOG_LEVEL: INFO @@ -1083,11 +1084,11 @@ This sample project contains an Users function with two distinct set of routes, UsersService: Type: AWS::Serverless::Function Properties: - Handler: lambda_function.lambda_handler - CodeUri: users + Handler: users.main.lambda_handler + CodeUri: src Layers: # Latest version: https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:3 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:4 Events: ByUser: Type: Api @@ -1119,7 +1120,7 @@ This sample project contains an Users function with two distinct set of routes, Value: !GetAtt UsersService.Arn ``` -=== "users/lambda_function.py" +=== "src/users/main.py" ```python hl_lines="9 15-16" from typing import Dict @@ -1130,23 +1131,23 @@ This sample project contains an Users function with two distinct set of routes, from aws_lambda_powertools.logging.correlation_paths import APPLICATION_LOAD_BALANCER from aws_lambda_powertools.utilities.typing import LambdaContext - from routers import health, users + from .routers import health, users tracer = Tracer() logger = Logger() - app = ApiGatewayResolver(proxy_type=ProxyEventType.ALBEvent) + app = ApiGatewayResolver(proxy_type=ProxyEventType.APIGatewayProxyEvent) app.include_router(health.router) app.include_router(users.router) - @logger.inject_lambda_context(correlation_id_path=APPLICATION_LOAD_BALANCER) + @logger.inject_lambda_context(correlation_id_path=API_GATEWAY_REST) @tracer.capture_lambda_handler def lambda_handler(event: Dict, context: LambdaContext): return app.resolve(event, context) ``` -=== "users/routers/health.py" +=== "src/users/routers/health.py" ```python hl_lines="4 6-7 10" from typing import Dict @@ -1169,7 +1170,7 @@ This sample project contains an Users function with two distinct set of routes, ```python hl_lines="3" import json - from users import main # follows namespace package from root + from src.users import main # follows namespace package from root def test_lambda_handler(apigw_event, lambda_context): @@ -1180,16 +1181,6 @@ This sample project contains an Users function with two distinct set of routes, assert ret["body"] == expected ``` -=== ".env" - - > Note: It is not needed for PyCharm (select folder as source). - - This is necessary for Visual Studio Code, so integrated tooling works without failing import. 
- - ```bash - PYTHONPATH="users:${PYTHONPATH}" - ``` - ### Considerations This utility is optimized for fast startup, minimal feature set, and to quickly on-board customers familiar with frameworks like Flask — it's not meant to be a fully fledged framework. From 81fc02e9f65f079e2fef35448668b6660746021d Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Fri, 3 Dec 2021 00:09:34 -0800 Subject: [PATCH 16/36] chore(deps): support arm64 when developing locally (#862) --- poetry.lock | 632 +++++++++++++++++++++++++++---------------------- pyproject.toml | 6 +- 2 files changed, 356 insertions(+), 282 deletions(-) diff --git a/poetry.lock b/poetry.lock index 541b3f3e0ee..65ca63e79c3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -35,7 +35,7 @@ wrapt = "*" [[package]] name = "bandit" -version = "1.7.0" +version = "1.7.1" description = "Security oriented static analyser for python code." category = "dev" optional = false @@ -45,7 +45,6 @@ python-versions = ">=3.5" colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" PyYAML = ">=5.3.1" -six = ">=1.10.0" stevedore = ">=1.20.0" [[package]] @@ -79,14 +78,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.20.5" +version = "1.20.17" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.23.5,<1.24.0" +botocore = ">=1.23.17,<1.24.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -95,7 +94,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.23.5" +version = "1.23.17" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -111,23 +110,26 @@ crt = ["awscrt (==0.12.5)"] [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = "*" [[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" +name = "charset-normalizer" +version = "2.0.8" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.1" +version = "8.0.3" description = "Composable command line interface toolkit" category = "dev" optional = false @@ -147,7 +149,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.1.2" +version = "6.2" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -242,7 +244,7 @@ toml = "*" [[package]] name = "flake8-bugbear" -version = "21.9.2" +version = "21.11.29" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
category = "dev" optional = false @@ -253,7 +255,7 @@ attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "black", "hypothesis", "hypothesmith"] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] [[package]] name = "flake8-builtins" @@ -349,7 +351,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "ghp-import" -version = "2.0.1" +version = "2.0.2" description = "Copy your docs directly to the gh-pages branch." category = "dev" optional = false @@ -363,38 +365,38 @@ dev = ["twine", "markdown", "flake8", "wheel"] [[package]] name = "gitdb" -version = "4.0.7" +version = "4.0.9" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.4" +python-versions = ">=3.6" [package.dependencies] -smmap = ">=3.0.1,<5" +smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.17" +version = "3.1.20" description = "Python Git Library" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "idna" -version = "2.10" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.0.1" +version = "4.2.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -432,7 +434,7 @@ plugins = ["setuptools"] [[package]] name = "jinja2" -version = "3.0.1" +version = "3.0.3" description = "A very fast and expressive template engine." category = "dev" optional = false @@ -454,7 +456,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "mako" -version = "1.1.4" +version = "1.1.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "dev" optional = false @@ -483,15 +485,12 @@ restructuredText = ["rst2ansi"] [[package]] name = "markdown" -version = "3.3.4" +version = "3.3.5" description = "Python implementation of Markdown." category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [package.extras] testing = ["coverage", "pyyaml"] @@ -591,14 +590,11 @@ pymdown-extensions = ">=9.0" [[package]] name = "mkdocs-material-extensions" -version = "1.0.1" +version = "1.0.3" description = "Extension pack for Python Markdown." 
category = "dev" optional = false -python-versions = ">=3.5" - -[package.dependencies] -mkdocs-material = ">=5.0.0" +python-versions = ">=3.6" [[package]] name = "mypy" @@ -628,14 +624,14 @@ python-versions = "*" [[package]] name = "packaging" -version = "20.9" +version = "21.3" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" @@ -647,7 +643,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pbr" -version = "5.6.0" +version = "5.8.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -679,25 +675,26 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycodestyle" @@ -741,7 +738,7 @@ python-versions = ">=3.5" [[package]] name = "pymdown-extensions" -version = "9.0" +version = "9.1" description = "Extension pack for Python Markdown." category = "dev" optional = false @@ -752,11 +749,14 @@ Markdown = ">=3.2" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.6" description = "Python parsing module" category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" @@ -825,7 +825,7 @@ dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -868,7 +868,7 @@ mando = ">=0.6,<0.7" [[package]] name = "regex" -version = "2021.4.4" +version = "2021.11.10" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -876,25 +876,25 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.1" +version = "2.26.0" description = "Python HTTP for Humans." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "ruamel.yaml" -version = "0.17.4" +version = "0.17.17" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "dev" optional = false @@ -909,11 +909,11 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel.yaml.clib" -version = "0.2.2" +version = "0.2.6" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" [[package]] name = "s3transfer" @@ -939,15 +939,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "4.0.0" +version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "stevedore" -version = "3.3.0" +version = "3.5.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -959,7 +959,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "testfixtures" -version = "6.17.1" +version = "6.18.3" description = "A collection of helpers and mock objects for unit tests and doc tests." category = "dev" optional = false @@ -980,7 +980,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.2.1" +version = "1.2.2" description = "A lil' TOML parser" category = "dev" optional = false @@ -996,15 +996,15 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "urllib3" -version = "1.26.5" +version = "1.26.7" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false @@ -1017,22 +1017,22 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "watchdog" -version = "2.1.3" +version = "2.1.6" description = "Filesystem events monitoring" category = "dev" optional = false python-versions = ">=3.6" [package.extras] -watchmedo = ["PyYAML (>=3.10)", "argh (>=0.24.1)"] +watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wrapt" -version = "1.12.1" +version = "1.13.3" description = "Module for decorators, wrappers and monkey patching." 
category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "xenon" @@ -1049,7 +1049,7 @@ requests = ">=2.0,<3.0" [[package]] name = "zipp" -version = "3.4.1" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false @@ -1057,7 +1057,7 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] pydantic = ["pydantic", "email-validator"] @@ -1065,7 +1065,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "18b7eb5ba70ed322228d5e0b09e375c60ede5ff94c77c618f2ed0414f2e0b58d" +content-hash = "17510d9a98137b8f430df0fbca1bcb3037aab183a4ba2e8d5fdad8e7e81574fc" [metadata.files] atomicwrites = [ @@ -1081,85 +1081,85 @@ aws-xray-sdk = [ {file = "aws_xray_sdk-2.8.0-py2.py3-none-any.whl", hash = "sha256:487e44a2e0b2a5b994f7db5fad3a8115f1ea238249117a119bce8ca2750661bd"}, ] bandit = [ - {file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, - {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, + {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, + {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, ] black = [ {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, ] boto3 = [ - {file = "boto3-1.20.5-py3-none-any.whl", hash = "sha256:81ca80fbb3d551819c35c809cb159fd0bec6701d3d8f0e5906a22da7558d098e"}, - {file = "boto3-1.20.5.tar.gz", hash = "sha256:cc620c289b12d7bf7c2706b517c9f8950f9be4622aacc9e7580b8b4ee0d3bc73"}, + {file = "boto3-1.20.17-py3-none-any.whl", hash = "sha256:b832c75386a4c5b7194acea1ae82dc309fddd69e660731350235d19cf70d8014"}, + {file = "boto3-1.20.17.tar.gz", hash = "sha256:41ea196ff71ee0255ad164790319ec158fd5048de915173e8b21226650a0512f"}, ] botocore = [ - {file = "botocore-1.23.5-py3-none-any.whl", hash = "sha256:c8eaeee0bac356396386aa9165043808fe736fb9e03ac0dedb1dfd82f41ad1a3"}, - {file = "botocore-1.23.5.tar.gz", hash = "sha256:49d1f012dc8467577a5fe603fc87cc13af816dd926b2bc2e28a3b2999ab14d36"}, + {file = "botocore-1.23.17-py3-none-any.whl", hash = "sha256:54240370476d8e67a97664d2c47df451f0e1d30e9d50ea0a88da4c2c27981159"}, + {file = "botocore-1.23.17.tar.gz", hash = "sha256:a9753b5220b5cc1bb8078086dc8ee10aa7da482b279dd0347965e9145a557003"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = 
"sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +charset-normalizer = [ + {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, + {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, ] click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-6.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:675adb3b3380967806b3cbb9c5b00ceb29b1c472692100a338730c1d3e59c8b9"}, - {file = "coverage-6.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95a58336aa111af54baa451c33266a8774780242cab3704b7698d5e514840758"}, - {file = "coverage-6.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0a595a781f8e186580ff8e3352dd4953b1944289bec7705377c80c7e36c4d6c"}, - {file = "coverage-6.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d3c5f49ce6af61154060640ad3b3281dbc46e2e0ef2fe78414d7f8a324f0b649"}, - {file = "coverage-6.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:310c40bed6b626fd1f463e5a83dba19a61c4eb74e1ac0d07d454ebbdf9047e9d"}, - {file = "coverage-6.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a4d48e42e17d3de212f9af44f81ab73b9378a4b2b8413fd708d0d9023f2bbde4"}, - {file = "coverage-6.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffa545230ca2ad921ad066bf8fd627e7be43716b6e0fcf8e32af1b8188ccb0ab"}, - {file = "coverage-6.1.2-cp310-cp310-win32.whl", hash = "sha256:cd2d11a59afa5001ff28073ceca24ae4c506da4355aba30d1e7dd2bd0d2206dc"}, - {file = "coverage-6.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:96129e41405887a53a9cc564f960d7f853cc63d178f3a182fdd302e4cab2745b"}, - {file = "coverage-6.1.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1de9c6f5039ee2b1860b7bad2c7bc3651fbeb9368e4c4d93e98a76358cdcb052"}, - {file = "coverage-6.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:80cb70264e9a1d04b519cdba3cd0dc42847bf8e982a4d55c769b9b0ee7cdce1e"}, - {file = "coverage-6.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:ba6125d4e55c0b8e913dad27b22722eac7abdcb1f3eab1bd090eee9105660266"}, - {file = "coverage-6.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8492d37acdc07a6eac6489f6c1954026f2260a85a4c2bb1e343fe3d35f5ee21a"}, 
- {file = "coverage-6.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66af99c7f7b64d050d37e795baadf515b4561124f25aae6e1baa482438ecc388"}, - {file = "coverage-6.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ebcc03e1acef4ff44f37f3c61df478d6e469a573aa688e5a162f85d7e4c3860d"}, - {file = "coverage-6.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98d44a8136eebbf544ad91fef5bd2b20ef0c9b459c65a833c923d9aa4546b204"}, - {file = "coverage-6.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c18725f3cffe96732ef96f3de1939d81215fd6d7d64900dcc4acfe514ea4fcbf"}, - {file = "coverage-6.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c8e9c4bcaaaa932be581b3d8b88b677489975f845f7714efc8cce77568b6711c"}, - {file = "coverage-6.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:06d009e8a29483cbc0520665bc46035ffe9ae0e7484a49f9782c2a716e37d0a0"}, - {file = "coverage-6.1.2-cp36-cp36m-win32.whl", hash = "sha256:e5432d9c329b11c27be45ee5f62cf20a33065d482c8dec1941d6670622a6fb8f"}, - {file = "coverage-6.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:82fdcb64bf08aa5db881db061d96db102c77397a570fbc112e21c48a4d9cb31b"}, - {file = "coverage-6.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:94f558f8555e79c48c422045f252ef41eb43becdd945e9c775b45ebfc0cbd78f"}, - {file = "coverage-6.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046647b96969fda1ae0605f61288635209dd69dcd27ba3ec0bf5148bc157f954"}, - {file = "coverage-6.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cc799916b618ec9fd00135e576424165691fec4f70d7dc12cfaef09268a2478c"}, - {file = "coverage-6.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:62646d98cf0381ffda301a816d6ac6c35fc97aa81b09c4c52d66a15c4bef9d7c"}, - {file = "coverage-6.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:27a3df08a855522dfef8b8635f58bab81341b2fb5f447819bc252da3aa4cf44c"}, - {file = "coverage-6.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:610c0ba11da8de3a753dc4b1f71894f9f9debfdde6559599f303286e70aeb0c2"}, - {file = "coverage-6.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:35b246ae3a2c042dc8f410c94bcb9754b18179cdb81ff9477a9089dbc9ecc186"}, - {file = "coverage-6.1.2-cp37-cp37m-win32.whl", hash = "sha256:0cde7d9fe2fb55ff68ebe7fb319ef188e9b88e0a3d1c9c5db7dd829cd93d2193"}, - {file = "coverage-6.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:958ac66272ff20e63d818627216e3d7412fdf68a2d25787b89a5c6f1eb7fdd93"}, - {file = "coverage-6.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a300b39c3d5905686c75a369d2a66e68fd01472ea42e16b38c948bd02b29e5bd"}, - {file = "coverage-6.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3855d5d26292539861f5ced2ed042fc2aa33a12f80e487053aed3bcb6ced13"}, - {file = "coverage-6.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:586d38dfc7da4a87f5816b203ff06dd7c1bb5b16211ccaa0e9788a8da2b93696"}, - {file = "coverage-6.1.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a34fccb45f7b2d890183a263578d60a392a1a218fdc12f5bce1477a6a68d4373"}, - {file = "coverage-6.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:bc1ee1318f703bc6c971da700d74466e9b86e0c443eb85983fb2a1bd20447263"}, - {file = "coverage-6.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3f546f48d5d80a90a266769aa613bc0719cb3e9c2ef3529d53f463996dd15a9d"}, - {file = "coverage-6.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd92ece726055e80d4e3f01fff3b91f54b18c9c357c48fcf6119e87e2461a091"}, - {file = "coverage-6.1.2-cp38-cp38-win32.whl", hash = "sha256:24ed38ec86754c4d5a706fbd5b52b057c3df87901a8610d7e5642a08ec07087e"}, - {file = "coverage-6.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:97ef6e9119bd39d60ef7b9cd5deea2b34869c9f0b9777450a7e3759c1ab09b9b"}, - {file = "coverage-6.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e5a8c947a2a89c56655ecbb789458a3a8e3b0cbf4c04250331df8f647b3de59"}, - {file = "coverage-6.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a39590d1e6acf6a3c435c5d233f72f5d43b585f5be834cff1f21fec4afda225"}, - {file = "coverage-6.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9d2c2e3ce7b8cc932a2f918186964bd44de8c84e2f9ef72dc616f5bb8be22e71"}, - {file = "coverage-6.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3348865798c077c695cae00da0924136bb5cc501f236cfd6b6d9f7a3c94e0ec4"}, - {file = "coverage-6.1.2-cp39-cp39-win32.whl", hash = "sha256:fae3fe111670e51f1ebbc475823899524e3459ea2db2cb88279bbfb2a0b8a3de"}, - {file = "coverage-6.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:af45eea024c0e3a25462fade161afab4f0d9d9e0d5a5d53e86149f74f0a35ecc"}, - {file = "coverage-6.1.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:eab14fdd410500dae50fd14ccc332e65543e7b39f6fc076fe90603a0e5d2f929"}, - {file = "coverage-6.1.2.tar.gz", hash = "sha256:d9a635114b88c0ab462e0355472d00a180a5fbfd8511e7f18e4ac32652e7d972"}, + {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, + {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, + {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, + {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, + {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, + {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, + {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, + {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, + {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, + {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, + {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, + {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, + {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, + {file 
= "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, + {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, + {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, + {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, + {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, + {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, + {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, + {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, @@ -1189,8 +1189,8 @@ flake8-black = [ {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-21.9.2.tar.gz", hash = "sha256:db9a09893a6c649a197f5350755100bb1dd84f110e60cf532fdfa07e41808ab2"}, - {file = "flake8_bugbear-21.9.2-py36.py37.py38-none-any.whl", hash = "sha256:4f7eaa6f05b7d7ea4cbbde93f7bcdc5438e79320fa1ec420d860c181af38b769"}, + {file = "flake8-bugbear-21.11.29.tar.gz", hash = "sha256:8b04cb2fafc6a78e1a9d873bd3988e4282f7959bb6b0d7c1ae648ec09b937a7b"}, + {file = "flake8_bugbear-21.11.29-py36.py37.py38-none-any.whl", hash = "sha256:179e41ddae5de5e3c20d1f61736feeb234e70958fbb56ab3c28a67739c8e9a82"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, @@ -1223,24 +1223,24 @@ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] ghp-import = [ - {file = "ghp-import-2.0.1.tar.gz", hash = "sha256:753de2eace6e0f7d4edfb3cce5e3c3b98cd52aadb80163303d1d036bda7b4483"}, - {file = "ghp_import-2.0.1-py3-none-any.whl", hash = 
"sha256:8241a8e9f8dd3c1fafe9696e6e081b57a208ef907e9939c44e7415e407ab40ea"}, + {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, + {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, ] gitdb = [ - {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, - {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.17-py3-none-any.whl", hash = "sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135"}, - {file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"}, + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.0.1-py3-none-any.whl", hash = "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"}, - {file = "importlib_metadata-4.0.1.tar.gz", hash = "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581"}, + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1251,24 +1251,24 @@ isort = [ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] jinja2 = [ - {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, - {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] mako = [ - {file = "Mako-1.1.4-py2.py3-none-any.whl", hash = 
"sha256:aea166356da44b9b830c8023cd9b557fa856bd8b4035d6de771ca027dfc5cc6e"}, - {file = "Mako-1.1.4.tar.gz", hash = "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab"}, + {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, + {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, ] mando = [ {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, ] markdown = [ - {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, - {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, + {file = "Markdown-3.3.5-py3-none-any.whl", hash = "sha256:0d2d09f75cb8d1ffc6770c65c61770b23a61708101f47bda416a002a0edbc480"}, + {file = "Markdown-3.3.5.tar.gz", hash = "sha256:26e9546bfbcde5fcd072bd8f612c9c1b6e2677cb8aadbdf65206674f46dde069"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, @@ -1366,8 +1366,8 @@ mkdocs-material = [ {file = "mkdocs_material-7.3.6-py2.py3-none-any.whl", hash = "sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75"}, ] mkdocs-material-extensions = [ - {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, - {file = "mkdocs_material_extensions-1.0.1-py3-none-any.whl", hash = "sha256:d90c807a88348aa6d1805657ec5c0b2d8d609c110e62b9dce4daf7fa981fa338"}, + {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, + {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, ] mypy = [ {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, @@ -1399,16 +1399,16 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pbr = [ - {file = "pbr-5.6.0-py2.py3-none-any.whl", hash = "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"}, - {file = "pbr-5.6.0.tar.gz", hash = "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"}, + {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = 
"sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, + {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, ] pdoc3 = [ {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, @@ -1418,12 +1418,12 @@ platformdirs = [ {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, ] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, @@ -1462,12 +1462,12 @@ pygments = [ {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pymdown-extensions = [ - {file = "pymdown-extensions-9.0.tar.gz", hash = "sha256:01e4bec7f4b16beaba0087a74496401cf11afd69e3a11fe95cb593e5c698ef40"}, - {file = "pymdown_extensions-9.0-py3-none-any.whl", hash = "sha256:430cc2fbb30cef2df70edac0b4f62614a6a4d2b06462e32da4ca96098b7c1dfb"}, + {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, + {file = "pymdown_extensions-9.1-py3-none-any.whl", hash = "sha256:b03e66f91f33af4a6e7a0e20c740313522995f69a03d86316b1449766c473d0e"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, @@ -1486,8 +1486,8 @@ pytest-mock = [ {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, @@ -1529,88 +1529,111 @@ radon = [ {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, ] regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = 
"regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9ed0b1e5e0759d6b7f8e2f143894b2a7f3edd313f38cf44e1e15d360e11749b"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:473e67837f786404570eae33c3b64a4b9635ae9f00145250851a1292f484c063"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2fee3ed82a011184807d2127f1733b4f6b2ff6ec7151d83ef3477f3b96a13d03"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d5fd67df77bab0d3f4ea1d7afca9ef15c2ee35dfb348c7b57ffb9782a6e4db6e"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5d408a642a5484b9b4d11dea15a489ea0928c7e410c7525cd892f4d04f2f617b"}, + {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, + {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, + {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74cbeac0451f27d4f50e6e8a8f3a52ca074b5e2da9f7b505c4201a57a8ed6286"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:3598893bde43091ee5ca0a6ad20f08a0435e93a69255eeb5f81b85e81e329264"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:50a7ddf3d131dc5633dccdb51417e2d1910d25cbcf842115a3a5893509140a3a"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:61600a7ca4bcf78a96a68a27c2ae9389763b5b94b63943d5158f2a377e09d29a"}, + {file = 
"regex-2021.11.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:563d5f9354e15e048465061509403f68424fef37d5add3064038c2511c8f5e00"}, + {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, + {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, + {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:42b50fa6666b0d50c30a990527127334d6b96dd969011e843e726a64011485da"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6e1d2cc79e8dae442b3fa4a26c5794428b98f81389af90623ffcc650ce9f6732"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0416f7399e918c4b0e074a0f66e5191077ee2ca32a0f99d4c187a62beb47aa05"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ce298e3d0c65bd03fa65ffcc6db0e2b578e8f626d468db64fdf8457731052942"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dc07f021ee80510f3cd3af2cad5b6a3b3a10b057521d9e6aaeb621730d320c5a"}, + {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, + {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, + {file = 
"regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f5be7805e53dafe94d295399cfbe5227f39995a997f4fd8539bf3cbdc8f47ca8"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a955b747d620a50408b7fdf948e04359d6e762ff8a85f5775d907ceced715129"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:139a23d1f5d30db2cc6c7fd9c6d6497872a672db22c4ae1910be22d4f4b2068a"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ca49e1ab99593438b204e00f3970e7a5f70d045267051dfa6b5f4304fcfa1dbf"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96fc32c16ea6d60d3ca7f63397bff5c75c5a562f7db6dec7d412f7c4d2e78ec0"}, + {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, + {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd410a1cbb2d297c67d8521759ab2ee3f1d66206d2e4328502a487589a2cb21b"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e6096b0688e6e14af6a1b10eaad86b4ff17935c49aa774eac7c95a57a4e8c296"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:529801a0d58809b60b3531ee804d3e3be4b412c94b5d267daa3de7fadef00f49"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f594b96fe2e0821d026365f72ac7b4f0b487487fb3d4aaf10dd9d97d88a9737"}, + 
{file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2409b5c9cef7054dde93a9803156b411b677affc84fca69e908b1cb2c540025d"}, + {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, + {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, + {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] "ruamel.yaml" = [ - {file = "ruamel.yaml-0.17.4-py3-none-any.whl", hash = "sha256:ac79fb25f5476e8e9ed1c53b8a2286d2c3f5dde49eb37dbcee5c7eb6a8415a22"}, - {file = "ruamel.yaml-0.17.4.tar.gz", hash = "sha256:44bc6b54fddd45e4bc0619059196679f9e8b79c027f4131bb072e6a22f4d5e28"}, + {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, + {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, ] "ruamel.yaml.clib" = [ - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win32.whl", hash = "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win32.whl", hash = "sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win32.whl", hash = "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win32.whl", hash = "sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb"}, - {file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, ] s3transfer = [ {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, @@ -1621,24 +1644,24 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] smmap = [ - {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, - {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] stevedore = [ - {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, - {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, ] 
testfixtures = [ - {file = "testfixtures-6.17.1-py2.py3-none-any.whl", hash = "sha256:9ed31e83f59619e2fa17df053b241e16e0608f4580f7b5a9333a0c9bdcc99137"}, - {file = "testfixtures-6.17.1.tar.gz", hash = "sha256:5ec3a0dd6f71cc4c304fbc024a10cc293d3e0b852c868014b9f233203e149bda"}, + {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, + {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, - {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, + {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, + {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, ] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, @@ -1673,45 +1696,96 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ - {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, - {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, ] watchdog = [ - {file = "watchdog-2.1.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9628f3f85375a17614a2ab5eac7665f7f7be8b6b0a2a228e6f6a2e91dd4bfe26"}, - {file = "watchdog-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:acc4e2d5be6f140f02ee8590e51c002829e2c33ee199036fcd61311d558d89f4"}, - {file = "watchdog-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85b851237cf3533fabbc034ffcd84d0fa52014b3121454e5f8b86974b531560c"}, - {file = "watchdog-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a12539ecf2478a94e4ba4d13476bb2c7a2e0a2080af2bb37df84d88b1b01358a"}, - {file = "watchdog-2.1.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6fe9c8533e955c6589cfea6f3f0a1a95fb16867a211125236c82e1815932b5d7"}, - {file = 
"watchdog-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d9456f0433845e7153b102fffeb767bde2406b76042f2216838af3b21707894e"}, - {file = "watchdog-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fd8c595d5a93abd441ee7c5bb3ff0d7170e79031520d113d6f401d0cf49d7c8f"}, - {file = "watchdog-2.1.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0bcfe904c7d404eb6905f7106c54873503b442e8e918cc226e1828f498bdc0ca"}, - {file = "watchdog-2.1.3-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bf84bd94cbaad8f6b9cbaeef43080920f4cb0e61ad90af7106b3de402f5fe127"}, - {file = "watchdog-2.1.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b8ddb2c9f92e0c686ea77341dcb58216fa5ff7d5f992c7278ee8a392a06e86bb"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8805a5f468862daf1e4f4447b0ccf3acaff626eaa57fbb46d7960d1cf09f2e6d"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:3e305ea2757f81d8ebd8559d1a944ed83e3ab1bdf68bcf16ec851b97c08dc035"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_i686.whl", hash = "sha256:431a3ea70b20962e6dee65f0eeecd768cd3085ea613ccb9b53c8969de9f6ebd2"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:e4929ac2aaa2e4f1a30a36751160be391911da463a8799460340901517298b13"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:201cadf0b8c11922f54ec97482f95b2aafca429c4c3a4bb869a14f3c20c32686"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:3a7d242a7963174684206093846537220ee37ba9986b824a326a8bb4ef329a33"}, - {file = "watchdog-2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:54e057727dd18bd01a3060dbf5104eb5a495ca26316487e0f32a394fd5fe725a"}, - {file = "watchdog-2.1.3-py3-none-win32.whl", hash = "sha256:b5fc5c127bad6983eecf1ad117ab3418949f18af9c8758bd10158be3647298a9"}, - {file = "watchdog-2.1.3-py3-none-win_amd64.whl", hash = "sha256:44acad6f642996a2b50bb9ce4fb3730dde08f23e79e20cd3d8e2a2076b730381"}, - {file = "watchdog-2.1.3-py3-none-win_ia64.whl", hash = "sha256:0bcdf7b99b56a3ae069866c33d247c9994ffde91b620eaf0306b27e099bd1ae0"}, - {file = "watchdog-2.1.3.tar.gz", hash = "sha256:e5236a8e8602ab6db4b873664c2d356c365ab3cac96fbdec4970ad616415dd45"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9693f35162dc6208d10b10ddf0458cc09ad70c30ba689d9206e02cd836ce28a3"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aba5c812f8ee8a3ff3be51887ca2d55fb8e268439ed44110d3846e4229eb0e8b"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ae38bf8ba6f39d5b83f78661273216e7db5b00f08be7592062cb1fc8b8ba542"}, + {file = "watchdog-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ad6f1796e37db2223d2a3f302f586f74c72c630b48a9872c1e7ae8e92e0ab669"}, + {file = "watchdog-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:922a69fa533cb0c793b483becaaa0845f655151e7256ec73630a1b2e9ebcb660"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b2fcf9402fde2672545b139694284dc3b665fd1be660d73eca6805197ef776a3"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3386b367e950a11b0568062b70cc026c6f645428a698d33d39e013aaeda4cc04"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f1c00aa35f504197561060ca4c21d3cc079ba29cf6dd2fe61024c70160c990b"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:b52b88021b9541a60531142b0a451baca08d28b74a723d0c99b13c8c8d48d604"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8047da932432aa32c515ec1447ea79ce578d0559362ca3605f8e9568f844e3c6"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e92c2d33858c8f560671b448205a268096e17870dcf60a9bb3ac7bfbafb7f5f9"}, + {file = "watchdog-2.1.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7d336912853d7b77f9b2c24eeed6a5065d0a0cc0d3b6a5a45ad6d1d05fb8cd8"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cca7741c0fcc765568350cb139e92b7f9f3c9a08c4f32591d18ab0a6ac9e71b6"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_armv7l.whl", hash = "sha256:25fb5240b195d17de949588628fdf93032ebf163524ef08933db0ea1f99bd685"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_i686.whl", hash = "sha256:be9be735f827820a06340dff2ddea1fb7234561fa5e6300a62fe7f54d40546a0"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0d19fb2441947b58fbf91336638c2b9f4cc98e05e1045404d7a4cb7cddc7a65"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:3becdb380d8916c873ad512f1701f8a92ce79ec6978ffde92919fd18d41da7fb"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_s390x.whl", hash = "sha256:ae67501c95606072aafa865b6ed47343ac6484472a2f95490ba151f6347acfc2"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e0f30db709c939cabf64a6dc5babb276e6d823fd84464ab916f9b9ba5623ca15"}, + {file = "watchdog-2.1.6-py3-none-win32.whl", hash = "sha256:e02794ac791662a5eafc6ffeaf9bcc149035a0e48eb0a9d40a8feb4622605a3d"}, + {file = "watchdog-2.1.6-py3-none-win_amd64.whl", hash = "sha256:bd9ba4f332cf57b2c1f698be0728c020399ef3040577cde2939f2e045b39c1e5"}, + {file = "watchdog-2.1.6-py3-none-win_ia64.whl", hash = "sha256:a0f1c7edf116a12f7245be06120b1852275f9506a7d90227648b250755a03923"}, + {file = "watchdog-2.1.6.tar.gz", hash = "sha256:a36e75df6c767cbf46f61a91c70b3ba71811dfa0aca4a324d9407a06a8b7a2e7"}, ] wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = 
"wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, ] xenon = [ {file = "xenon-0.8.0-py2.py3-none-any.whl", hash = "sha256:4c3d7157d9ae058364e130c831702e4a65a1f729d4b4def912418ed09772c851"}, {file = "xenon-0.8.0.tar.gz", hash = "sha256:cd5cad0930673d0e52609712c63fe4721a8f4c4342dc338bd7ea5fa0666b8515"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = 
"sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index f999daf4d28..23b849f642e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ pydantic = {version = "^1.8.2", optional = true } email-validator = {version = "*", optional = true } [tool.poetry.dev-dependencies] -coverage = {extras = ["toml"], version = "^6.1"} +coverage = {extras = ["toml"], version = "^6.2"} pytest = "^6.2.5" black = "^21.11b1" flake8 = "^4.0.1" @@ -45,11 +45,11 @@ pytest-cov = "^3.0.0" pytest-mock = "^3.5.1" pdoc3 = "^0.10.0" pytest-asyncio = "^0.16.0" -bandit = "^1.7.0" +bandit = "^1.7.1" radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.2.0" -flake8-bugbear = "^21.9.2" +flake8-bugbear = "^21.11.29" mkdocs-material = "^7.3.6" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" From 5cb9852ae1acf8e190f858d13cd070def5ee29bc Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Mon, 6 Dec 2021 21:38:29 -0800 Subject: [PATCH 17/36] fix(idempotency): include decorated fn name in hash (#869) --- .../utilities/idempotency/base.py | 2 +- .../utilities/idempotency/persistence/base.py | 10 ++- tests/functional/idempotency/conftest.py | 4 +- .../idempotency/test_idempotency.py | 64 +++++++++++++++---- 4 files changed, 62 insertions(+), 18 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/base.py b/aws_lambda_powertools/utilities/idempotency/base.py index 4b82c923a70..7dee94fc356 100644 --- a/aws_lambda_powertools/utilities/idempotency/base.py +++ b/aws_lambda_powertools/utilities/idempotency/base.py @@ -56,7 +56,7 @@ def __init__( self.fn_args = function_args self.fn_kwargs = function_kwargs - persistence_store.configure(config) + persistence_store.configure(config, self.function.__name__) self.persistence_store = persistence_store def handle(self) -> Any: diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 907af8edaa7..8f2b30d289a 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -112,6 +112,7 @@ class BasePersistenceLayer(ABC): def __init__(self): """Initialize the defaults""" + self.function_name = "" self.configured = False self.event_key_jmespath: Optional[str] = None self.event_key_compiled_jmespath = None @@ -124,7 +125,7 @@ def __init__(self): self._cache: Optional[LRUDict] = None self.hash_function = None - def configure(self, config: IdempotencyConfig) -> None: + def configure(self, config: IdempotencyConfig, function_name: Optional[str] = None) -> None: """ Initialize the base persistence layer from the configuration settings @@ -132,7 +133,11 @@ def configure(self, config: IdempotencyConfig) -> None: ---------- config: IdempotencyConfig Idempotency configuration settings + function_name: str, Optional + The name of the function being decorated """ + self.function_name = f"{os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, 'test-func')}.{function_name or ''}" + if self.configured: # Prevent being reconfigured multiple times return @@ -178,8 +183,7 @@ def _get_hashed_idempotency_key(self, data: Dict[str, Any]) -> str: warnings.warn(f"No value found for idempotency_key. 
jmespath: {self.event_key_jmespath}") generated_hash = self._generate_hash(data=data) - function_name = os.getenv(constants.LAMBDA_FUNCTION_NAME_ENV, "test-func") - return f"{function_name}#{generated_hash}" + return f"{self.function_name}#{generated_hash}" @staticmethod def is_missing_idempotency_key(data) -> bool: diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index 71b5978497c..0f74d503b88 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -150,7 +150,7 @@ def expected_params_put_item_with_validation(hashed_idempotency_key, hashed_vali def hashed_idempotency_key(lambda_apigw_event, default_jmespath, lambda_context): compiled_jmespath = jmespath.compile(default_jmespath) data = compiled_jmespath.search(lambda_apigw_event) - return "test-func#" + hashlib.md5(serialize(data).encode()).hexdigest() + return "test-func.lambda_handler#" + hashlib.md5(serialize(data).encode()).hexdigest() @pytest.fixture @@ -158,7 +158,7 @@ def hashed_idempotency_key_with_envelope(lambda_apigw_event): event = extract_data_from_envelope( data=lambda_apigw_event, envelope=envelopes.API_GATEWAY_HTTP, jmespath_options={} ) - return "test-func#" + hashlib.md5(serialize(event).encode()).hexdigest() + return "test-func.lambda_handler#" + hashlib.md5(serialize(event).encode()).hexdigest() @pytest.fixture diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 043fb06a04a..a8cf652d8a0 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -735,7 +735,8 @@ def test_default_no_raise_on_missing_idempotency_key( idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer, lambda_context ): # GIVEN a persistence_store with use_local_cache = False and event_key_jmespath = "body" - persistence_store.configure(idempotency_config) + function_name = "foo" + persistence_store.configure(idempotency_config, function_name) assert persistence_store.use_local_cache is False assert "body" in persistence_store.event_key_jmespath @@ -743,7 +744,7 @@ def test_default_no_raise_on_missing_idempotency_key( hashed_key = persistence_store._get_hashed_idempotency_key({}) # THEN return the hash of None - expected_value = "test-func#" + md5(serialize(None).encode()).hexdigest() + expected_value = f"test-func.{function_name}#" + md5(serialize(None).encode()).hexdigest() assert expected_value == hashed_key @@ -781,7 +782,7 @@ def test_jmespath_with_powertools_json( idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer, lambda_context ): # GIVEN an event_key_jmespath with powertools_json custom function - persistence_store.configure(idempotency_config) + persistence_store.configure(idempotency_config, "handler") sub_attr_value = "cognito_user" static_pk_value = "some_key" expected_value = [sub_attr_value, static_pk_value] @@ -794,14 +795,14 @@ def test_jmespath_with_powertools_json( result = persistence_store._get_hashed_idempotency_key(api_gateway_proxy_event) # THEN the hashed idempotency key should match the extracted values generated hash - assert result == "test-func#" + persistence_store._generate_hash(expected_value) + assert result == "test-func.handler#" + persistence_store._generate_hash(expected_value) @pytest.mark.parametrize("config_with_jmespath_options", ["powertools_json(data).payload"], indirect=True) def 
test_custom_jmespath_function_overrides_builtin_functions( config_with_jmespath_options: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer, lambda_context ): - # GIVEN an persistence store with a custom jmespath_options + # GIVEN a persistence store with a custom jmespath_options # AND use a builtin powertools custom function persistence_store.configure(config_with_jmespath_options) @@ -871,7 +872,9 @@ def _delete_record(self, data_record: DataRecord) -> None: def test_idempotent_lambda_event_source(lambda_context): # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator mock_event = load_event("apiGatewayProxyV2Event.json") - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.lambda_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} # GIVEN an event_source decorator @@ -891,7 +894,9 @@ def lambda_handler(event, _): def test_idempotent_function(): # Scenario to validate we can use idempotent_function with any function mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.record_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") @@ -908,7 +913,9 @@ def test_idempotent_function_arbitrary_args_kwargs(): # Scenario to validate we can use idempotent_function with a function # with an arbitrary number of args and kwargs mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.record_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") @@ -923,7 +930,9 @@ def record_handler(arg_one, arg_two, record, is_record): def test_idempotent_function_invalid_data_kwarg(): mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.record_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} keyword_argument = "payload" @@ -940,7 +949,9 @@ def record_handler(record): def test_idempotent_function_arg_instead_of_kwarg(): mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.record_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} keyword_argument = "record" @@ -958,13 +969,19 @@ def record_handler(record): def test_idempotent_function_and_lambda_handler(lambda_context): # Scenario to validate we can use both idempotent_function and idempotent decorators mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.record_handler#" + 
hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") def record_handler(record): return expected_result + persistence_layer = MockPersistenceLayer( + "test-func.lambda_handler#" + hashlib.md5(serialize(mock_event).encode()).hexdigest() + ) + @idempotent(persistence_store=persistence_layer) def lambda_handler(event, _): return expected_result @@ -986,7 +1003,9 @@ def test_idempotent_data_sorting(): data_two = {"more_data": "more data 1", "data": "test message 1"} # Assertion will happen in MockPersistenceLayer - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(data_one).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer( + "test-func.dummy#" + hashlib.md5(json.dumps(data_one).encode()).hexdigest() + ) # GIVEN @idempotent_function(data_keyword_argument="payload", persistence_store=persistence_layer) @@ -1017,3 +1036,24 @@ def dummy_handler(event, context): dummy_handler(mock_event, lambda_context) assert len(persistence_store.table.method_calls) == 0 + + +@pytest.mark.parametrize("idempotency_config", [{"use_local_cache": True}], indirect=True) +def test_idempotent_function_duplicates( + idempotency_config: IdempotencyConfig, persistence_store: DynamoDBPersistenceLayer +): + # Scenario to validate the both methods are called + mock_event = {"data": "value"} + persistence_store.table = MagicMock() + + @idempotent_function(data_keyword_argument="data", persistence_store=persistence_store, config=idempotency_config) + def one(data): + return "one" + + @idempotent_function(data_keyword_argument="data", persistence_store=persistence_store, config=idempotency_config) + def two(data): + return "two" + + assert one(data=mock_event) == "one" + assert two(data=mock_event) == "two" + assert len(persistence_store.table.method_calls) == 4 From de26a949c69ca530cabbe539d7a62de2d32dffcc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Dec 2021 07:02:37 +0000 Subject: [PATCH 18/36] chore(deps): bump aws-xray-sdk from 2.8.0 to 2.9.0 (#876) Bumps [aws-xray-sdk](https://github.com/aws/aws-xray-sdk-python) from 2.8.0 to 2.9.0.
Release notes

Sourced from aws-xray-sdk's releases.

2.9.0 Release

See details in CHANGELOG

Changelog

Sourced from aws-xray-sdk's changelog.

2.9.0

  • bugfix: Change logging behavior to avoid overflow. PR302 <https://github.com/aws/aws-xray-sdk-python/pull/302>_.
  • improvement: Lazy load samplers to speed up cold start in lambda. PR312 <https://github.com/aws/aws-xray-sdk-python/pull/312>_.
  • improvement: Replace slow json file name resolver. PR 306 <https://github.com/aws/aws-xray-sdk-python/pull/306>_.
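The sampler lazy-loading called out above is the change most relevant to Lambda cold starts. As a rough illustration only (simplified names, not the SDK's actual internals), the pattern defers an expensive construction until first use:

```python
import time


class ExpensiveSampler:
    """Stand-in for a sampler whose construction is costly (rule loading, etc.)."""

    def __init__(self) -> None:
        time.sleep(0.1)  # simulated one-time setup cost

    def should_sample(self) -> bool:
        return True


class Recorder:
    """Lazy initialization: the sampler is built on first access rather than
    in __init__, so invocations that never sample skip the setup entirely."""

    def __init__(self) -> None:
        self._sampler = None

    @property
    def sampler(self) -> ExpensiveSampler:
        if self._sampler is None:
            self._sampler = ExpensiveSampler()
        return self._sampler


recorder = Recorder()                    # cheap at import/cold-start time
assert recorder.sampler.should_sample()  # first access pays the cost once
```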
Commits
  • be4dea2 Release commit for v2.9.0 (#318)
  • 9858cab Merge pull request #317 from wangzlei/master
  • 85d8801 Remove redundant error log MISSING_SEGMENT_MSG
  • 05f5e8f Merge pull request #315 from aws/willarmiros-patch-1
  • e1841e6 Create CODEOWNERS
  • 0e1f935 Merge pull request #312 from maxday/maxday/lazy-load-samplers
  • f4b33f0 lazy load samplers
  • 86248a5 Merge pull request #306 from NathanielRN/remove-slow-filepath-resolve
  • aa4b2f5 Benchmark tests should both consistently return
  • 8ad460e Add benchmarks for json read
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aws-xray-sdk&package-manager=pip&previous-version=2.8.0&new-version=2.9.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
--- poetry.lock | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 65ca63e79c3..600c4f0f4ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -22,7 +22,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "aws-xray-sdk" -version = "2.8.0" +version = "2.9.0" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." category = "main" optional = false @@ -1077,8 +1077,8 @@ attrs = [ {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] aws-xray-sdk = [ - {file = "aws-xray-sdk-2.8.0.tar.gz", hash = "sha256:90c2fcc982a770e86d009a4c3d2b5c3e372da91cb8284d982bae458e2c0bb268"}, - {file = "aws_xray_sdk-2.8.0-py2.py3-none-any.whl", hash = "sha256:487e44a2e0b2a5b994f7db5fad3a8115f1ea238249117a119bce8ca2750661bd"}, + {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, + {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, ] bandit = [ {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, @@ -1411,6 +1411,7 @@ pbr = [ {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, ] pdoc3 = [ + {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, ] platformdirs = [ From 967f63b6a4868caf66ecb44294976961705a71ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Dec 2021 07:12:34 +0000 Subject: [PATCH 19/36] chore(deps-dev): bump black from 21.11b1 to 21.12b0 (#872) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [black](https://github.com/psf/black) from 21.11b1 to 21.12b0.
Release notes

Sourced from black's releases.

21.12b0

Black

  • Fix determination of f-string expression spans (#2654)
  • Fix bad formatting of error messages about EOF in multi-line statements (#2343)
  • Functions and classes in blocks now have more consistent surrounding spacing (#2472)

Jupyter Notebook support

  • Cell magics are now only processed if they are known Python cell magics. Earlier, all cell magics were tokenized, leading to possible indentation errors e.g. with %%writefile. (#2630)
  • Fix assignment to environment variables in Jupyter Notebooks (#2642)

Python 3.10 support

  • Point users to using --target-version py310 if we detect 3.10-only syntax (#2668)
  • Fix match statements with open sequence subjects, like match a, b: or match a, *b: (#2639) (#2659)
  • Fix match/case statements that contain match/case soft keywords multiple times, like match re.match() (#2661)
  • Fix case statements with an inline body (#2665)
  • Fix styling of starred expressions inside match subject (#2667)
  • Fix parser error location on invalid syntax in a match statement (#2649)
  • Fix Python 3.10 support on platforms without ProcessPoolExecutor (#2631)
  • Improve parsing performance on code that uses match under --target-version py310 up to ~50% (#2670)

Packaging


Thank you!

  • @​isidentical for the polishing up 3.10 syntax support (which they contributed in the first place!)
  • @​MarcoGorelli for their ever-continuing work on Black's jupyter support
  • @​jalaziz for cleaning up our Pyinstaller CD workflow
  • @​hauntsaninja for helping us drop the regex dependency

And also congrats to first contributors!

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=black&package-manager=pip&previous-version=21.11b1&new-version=21.12b0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) ---
--- poetry.lock | 93 +++----------------------------------------------- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 90 deletions(-) diff --git a/poetry.lock b/poetry.lock index 600c4f0f4ea..a003d0fb93b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,7 +49,7 @@ stevedore = ">=1.20.0" [[package]] name = "black" -version = "21.11b1" +version = "21.12b0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -61,7 +61,6 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0,<1" platformdirs = ">=2" -regex = ">=2021.4.4" tomli = ">=0.2.6,<2.0.0" typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ @@ -866,14 +865,6 @@ colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} future = "*" mando = ">=0.6,<0.7" -[[package]] -name = "regex" -version = "2021.11.10" -description = "Alternative regular expression module, to replace re." -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "requests" version = "2.26.0" @@ -1065,7 +1056,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "17510d9a98137b8f430df0fbca1bcb3037aab183a4ba2e8d5fdad8e7e81574fc" +content-hash = "d003a9b82e3692f6e55a5dde89dae18796d9c5747c9d097a0ec113ecb4e02f02" [metadata.files] atomicwrites = [ @@ -1085,8 +1076,8 @@ bandit = [ {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, ] black = [ - {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, - {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, + {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, + {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] boto3 = [ {file = "boto3-1.20.17-py3-none-any.whl", hash = "sha256:b832c75386a4c5b7194acea1ae82dc309fddd69e660731350235d19cf70d8014"}, @@ -1529,82 +1520,6 @@ radon = [ {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, ] -regex = [ - {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, - {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, - {file = 
"regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, - {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9ed0b1e5e0759d6b7f8e2f143894b2a7f3edd313f38cf44e1e15d360e11749b"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:473e67837f786404570eae33c3b64a4b9635ae9f00145250851a1292f484c063"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2fee3ed82a011184807d2127f1733b4f6b2ff6ec7151d83ef3477f3b96a13d03"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d5fd67df77bab0d3f4ea1d7afca9ef15c2ee35dfb348c7b57ffb9782a6e4db6e"}, - {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5d408a642a5484b9b4d11dea15a489ea0928c7e410c7525cd892f4d04f2f617b"}, - {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, - {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, - {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, - {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74cbeac0451f27d4f50e6e8a8f3a52ca074b5e2da9f7b505c4201a57a8ed6286"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:3598893bde43091ee5ca0a6ad20f08a0435e93a69255eeb5f81b85e81e329264"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:50a7ddf3d131dc5633dccdb51417e2d1910d25cbcf842115a3a5893509140a3a"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:61600a7ca4bcf78a96a68a27c2ae9389763b5b94b63943d5158f2a377e09d29a"}, - {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:563d5f9354e15e048465061509403f68424fef37d5add3064038c2511c8f5e00"}, - {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, - 
{file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, - {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, - {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:42b50fa6666b0d50c30a990527127334d6b96dd969011e843e726a64011485da"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6e1d2cc79e8dae442b3fa4a26c5794428b98f81389af90623ffcc650ce9f6732"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0416f7399e918c4b0e074a0f66e5191077ee2ca32a0f99d4c187a62beb47aa05"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ce298e3d0c65bd03fa65ffcc6db0e2b578e8f626d468db64fdf8457731052942"}, - {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dc07f021ee80510f3cd3af2cad5b6a3b3a10b057521d9e6aaeb621730d320c5a"}, - {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, - {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, - {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, - {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, - {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f5be7805e53dafe94d295399cfbe5227f39995a997f4fd8539bf3cbdc8f47ca8"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a955b747d620a50408b7fdf948e04359d6e762ff8a85f5775d907ceced715129"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:139a23d1f5d30db2cc6c7fd9c6d6497872a672db22c4ae1910be22d4f4b2068a"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ca49e1ab99593438b204e00f3970e7a5f70d045267051dfa6b5f4304fcfa1dbf"}, - {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96fc32c16ea6d60d3ca7f63397bff5c75c5a562f7db6dec7d412f7c4d2e78ec0"}, - {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, - {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, - {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, - {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, - {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd410a1cbb2d297c67d8521759ab2ee3f1d66206d2e4328502a487589a2cb21b"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e6096b0688e6e14af6a1b10eaad86b4ff17935c49aa774eac7c95a57a4e8c296"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:529801a0d58809b60b3531ee804d3e3be4b412c94b5d267daa3de7fadef00f49"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f594b96fe2e0821d026365f72ac7b4f0b487487fb3d4aaf10dd9d97d88a9737"}, - {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2409b5c9cef7054dde93a9803156b411b677affc84fca69e908b1cb2c540025d"}, - {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, - 
{file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, - {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, -] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, diff --git a/pyproject.toml b/pyproject.toml index 23b849f642e..feded5bf884 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ email-validator = {version = "*", optional = true } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^6.2"} pytest = "^6.2.5" -black = "^21.11b1" +black = "^21.12b0" flake8 = "^4.0.1" flake8-black = "^0.2.3" flake8-builtins = "^1.5.3" From e1927d5d7d458441bf2eb7b5cea64c7c2b09ddae Mon Sep 17 00:00:00 2001 From: "Shane R. Spencer" <305301+whardier@users.noreply.github.com> Date: Tue, 7 Dec 2021 23:56:04 -0900 Subject: [PATCH 20/36] fix(metrics): explicit type to single_metric ctx manager (#865) Co-authored-by: Heitor Lessa --- aws_lambda_powertools/metrics/metric.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index a30f428e38e..76ff4339dea 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -1,7 +1,7 @@ import json import logging from contextlib import contextmanager -from typing import Dict, Optional, Union +from typing import Dict, Optional, Union, Generator from .base import MetricManager, MetricUnit @@ -61,7 +61,7 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N @contextmanager -def single_metric(name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None): +def single_metric(name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None) -> Generator[SingleMetric, None, None]: """Context manager to simplify creation of a single metric Example From d4c29b72c7f9cd034c7e24af6b906441aaae1e20 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 8 Dec 2021 12:17:20 +0100 Subject: [PATCH 21/36] chore: correct pr label order Signed-off-by: heitorlessa --- .github/boring-cyborg.yml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 9ccdbc363be..31aa24bc94f 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -1,10 +1,5 @@ ##### Labeler ########################################################################################################## labelPRBasedOnFilePath: - area/utilities: - - aws_lambda_powertools/utilities/* - - aws_lambda_powertools/utilities/**/* - - aws_lambda_powertools/middleware_factory/* - - aws_lambda_powertools/middleware_factory/**/* area/logger: - aws_lambda_powertools/logging/* - aws_lambda_powertools/logging/**/* @@ -42,6 +37,13 @@ labelPRBasedOnFilePath: area/feature_flags: - aws_lambda_powertools/feature_flags/* - aws_lambda_powertools/feature_flags/**/* + area/jmespath_util: + - aws_lambda_powertools/utilities/jmespath_utils/* + area/utilities: + - aws_lambda_powertools/utilities/* + - aws_lambda_powertools/utilities/**/* + - aws_lambda_powertools/middleware_factory/* + - aws_lambda_powertools/middleware_factory/**/* documentation: - docs/* From 
de3cf2871eae355cc95502299172e901f860ff77 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Wed, 8 Dec 2021 10:49:10 -0800 Subject: [PATCH 22/36] fix(event-sources): handle claimsOverrideDetails set to null (#878) --- aws_lambda_powertools/metrics/metric.py | 6 ++++-- .../utilities/data_classes/cognito_user_pool_event.py | 5 +++-- tests/functional/test_data_classes.py | 10 ++++++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index 76ff4339dea..94b427738a1 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -1,7 +1,7 @@ import json import logging from contextlib import contextmanager -from typing import Dict, Optional, Union, Generator +from typing import Dict, Generator, Optional, Union from .base import MetricManager, MetricUnit @@ -61,7 +61,9 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N @contextmanager -def single_metric(name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None) -> Generator[SingleMetric, None, None]: +def single_metric( + name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None +) -> Generator[SingleMetric, None, None]: """Context manager to simplify creation of a single metric Example diff --git a/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py index e467875305f..954d3d15b5f 100644 --- a/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py +++ b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py @@ -523,8 +523,9 @@ def set_group_configuration_preferred_role(self, value: str): class PreTokenGenerationTriggerEventResponse(DictWrapper): @property def claims_override_details(self) -> ClaimsOverrideDetails: - # Ensure we have a `claimsOverrideDetails` element - self._data["response"].setdefault("claimsOverrideDetails", {}) + # Ensure we have a `claimsOverrideDetails` element and that it is not set to None + if self._data["response"].get("claimsOverrideDetails") is None: + self._data["response"]["claimsOverrideDetails"] = {} return ClaimsOverrideDetails(self._data["response"]["claimsOverrideDetails"]) diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 5514a888e7d..ded32639233 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -281,6 +281,16 @@ def test_cognito_pre_token_generation_trigger_event(): assert claims_override_details.group_configuration.preferred_role == "role_name" assert event["response"]["claimsOverrideDetails"]["groupOverrideDetails"]["preferredRole"] == "role_name" + # Ensure that even if "claimsOverrideDetails" was explicitly set to None, + # accessing `event.response.claims_override_details` would set it to `{}` + event["response"]["claimsOverrideDetails"] = None + claims_override_details = event.response.claims_override_details + assert claims_override_details._data == {} + assert event["response"]["claimsOverrideDetails"] == {} + claims_override_details.claims_to_suppress = ["email"] + assert claims_override_details.claims_to_suppress[0] == "email" + assert event["response"]["claimsOverrideDetails"]["claimsToSuppress"] == ["email"] + def test_cognito_define_auth_challenge_trigger_event(): event = DefineAuthChallengeTriggerEvent(load_event("cognitoDefineAuthChallengeEvent.json")) From
49943491e0bcc87d7975e4ed022ed648f455d53c Mon Sep 17 00:00:00 2001 From: Ran Isenberg <60175085+ran-isenberg@users.noreply.github.com> Date: Thu, 9 Dec 2021 14:07:57 +0200 Subject: [PATCH 23/36] fix(parameters): appconfig transform and return types (#877) Co-authored-by: Ran Isenberg Co-authored-by: Heitor Lessa --- .../utilities/parameters/base.py | 4 ++- tests/functional/test_utilities_parameters.py | 31 +++++++++++++++++-- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index b3b907bc18b..b059a3b2483 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -93,6 +93,8 @@ def get( raise GetParameterError(str(exc)) if transform is not None: + if isinstance(value, bytes): + value = value.decode("utf-8") value = transform_value(value, transform) self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age)) @@ -100,7 +102,7 @@ def get( return value @abstractmethod - def _get(self, name: str, **sdk_options) -> str: + def _get(self, name: str, **sdk_options) -> Union[str, bytes]: """ Retrieve parameter value from the underlying parameter store """ diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py index 79b8bfb2fd0..47fc5a0e982 100644 --- a/tests/functional/test_utilities_parameters.py +++ b/tests/functional/test_utilities_parameters.py @@ -1503,7 +1503,7 @@ def test_appconf_provider_get_configuration_no_transform(mock_name, config): stubber.activate() try: - value = provider.get(mock_name) + value: bytes = provider.get(mock_name) str_value = value.decode("utf-8") assert str_value == json.dumps(mock_body_json) stubber.assert_no_pending_responses() @@ -1516,11 +1516,12 @@ def test_appconf_get_app_config_no_transform(monkeypatch, mock_name): """ Test get_app_config() """ mock_body_json = {"myenvvar1": "Black Panther", "myenvvar2": 3} + mock_body_bytes = str.encode(json.dumps(mock_body_json)) class TestProvider(BaseProvider): - def _get(self, name: str, **kwargs) -> str: + def _get(self, name: str, **kwargs) -> bytes: assert name == mock_name - return json.dumps(mock_body_json).encode("utf-8") + return mock_body_bytes def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: raise NotImplementedError() @@ -1532,6 +1533,30 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: value = parameters.get_app_config(mock_name, environment=environment, application=application) str_value = value.decode("utf-8") assert str_value == json.dumps(mock_body_json) + assert value == mock_body_bytes + + +def test_appconf_get_app_config_transform_json(monkeypatch, mock_name): + """ + Test get_app_config() with a JSON transform + """ + mock_body_json = {"myenvvar1": "Black Panther", "myenvvar2": 3} + mock_body_bytes = str.encode(json.dumps(mock_body_json)) + + class TestProvider(BaseProvider): + def _get(self, name: str, **kwargs) -> bytes: + assert name == mock_name + return mock_body_bytes + + def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: + raise NotImplementedError() + + monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "appconfig", TestProvider()) + + environment = "dev" + application = "myapp" + value = parameters.get_app_config(mock_name, environment=environment, application=application, transform="json") + assert value == mock_body_json def test_appconf_get_app_config_new(monkeypatch, mock_name, mock_value): From
8de372914ade16f18a72484dbced8f4aba6de592 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 10 Dec 2021 09:50:22 +0100 Subject: [PATCH 24/36] fix(parser): mypy support for payload type override as models (#883) Co-authored-by: Tom McCarthy --- aws_lambda_powertools/utilities/parser/models/alb.py | 6 ++++-- aws_lambda_powertools/utilities/parser/models/apigw.py | 6 +++--- aws_lambda_powertools/utilities/parser/models/apigwv2.py | 6 +++--- .../utilities/parser/models/cloudwatch.py | 6 ++++-- aws_lambda_powertools/utilities/parser/models/dynamodb.py | 8 ++++---- .../utilities/parser/models/event_bridge.py | 6 ++++-- aws_lambda_powertools/utilities/parser/models/kinesis.py | 6 +++--- aws_lambda_powertools/utilities/parser/models/s3.py | 2 +- .../utilities/parser/models/s3_object_event.py | 6 ++++-- aws_lambda_powertools/utilities/parser/models/sns.py | 6 +++--- aws_lambda_powertools/utilities/parser/models/sqs.py | 6 +++--- docs/utilities/parser.md | 2 ++ 12 files changed, 38 insertions(+), 28 deletions(-) diff --git a/aws_lambda_powertools/utilities/parser/models/alb.py b/aws_lambda_powertools/utilities/parser/models/alb.py index d4ea5fde2a1..1112d0c04e4 100644 --- a/aws_lambda_powertools/utilities/parser/models/alb.py +++ b/aws_lambda_powertools/utilities/parser/models/alb.py @@ -1,7 +1,9 @@ -from typing import Dict +from typing import Dict, Union from pydantic import BaseModel +from aws_lambda_powertools.utilities.parser.types import Model + class AlbRequestContextData(BaseModel): targetGroupArn: str @@ -14,7 +16,7 @@ class AlbRequestContext(BaseModel): class AlbModel(BaseModel): httpMethod: str path: str - body: str + body: Union[str, Model] isBase64Encoded: bool headers: Dict[str, str] queryStringParameters: Dict[str, str] diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index 283a73da9c3..ce519b8e0e3 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -1,10 +1,10 @@ from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel, root_validator from pydantic.networks import IPvAnyNetwork -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model class ApiGatewayUserCertValidity(BaseModel): @@ -89,4 +89,4 @@ class APIGatewayProxyEventModel(BaseModel): pathParameters: Optional[Dict[str, str]] stageVariables: Optional[Dict[str, str]] isBase64Encoded: bool - body: Optional[str] + body: Optional[Union[str, Model]] diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py index 36dd85b907e..ddaf2d7ef82 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py +++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py @@ -1,10 +1,10 @@ from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel, Field from pydantic.networks import IPvAnyNetwork -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model class RequestContextV2AuthorizerIamCognito(BaseModel): @@ -67,5 +67,5 @@ class APIGatewayProxyEventV2Model(BaseModel): pathParameters: Optional[Dict[str, str]] stageVariables: Optional[Dict[str, str]] requestContext: RequestContextV2 - body: Optional[str] + body: 
Optional[Union[str, Model]] isBase64Encoded: bool diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py index a0fd3e37239..9b954ec3b13 100644 --- a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -3,17 +3,19 @@ import logging import zlib from datetime import datetime -from typing import List +from typing import List, Union from pydantic import BaseModel, Field, validator +from aws_lambda_powertools.utilities.parser.types import Model + logger = logging.getLogger(__name__) class CloudWatchLogsLogEvent(BaseModel): id: str # noqa AA03 VNE003 timestamp: datetime - message: str + message: Union[str, Model] class CloudWatchLogsDecode(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/dynamodb.py b/aws_lambda_powertools/utilities/parser/models/dynamodb.py index e7e3094bc9b..fe7514bada0 100644 --- a/aws_lambda_powertools/utilities/parser/models/dynamodb.py +++ b/aws_lambda_powertools/utilities/parser/models/dynamodb.py @@ -1,16 +1,16 @@ from datetime import date -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model class DynamoDBStreamChangedRecordModel(BaseModel): ApproximateCreationDateTime: Optional[date] Keys: Dict[str, Dict[str, Any]] - NewImage: Optional[Dict[str, Any]] - OldImage: Optional[Dict[str, Any]] + NewImage: Optional[Union[Dict[str, Any], Model]] + OldImage: Optional[Union[Dict[str, Any], Model]] SequenceNumber: str SizeBytes: int StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] diff --git a/aws_lambda_powertools/utilities/parser/models/event_bridge.py b/aws_lambda_powertools/utilities/parser/models/event_bridge.py index a94daef0d4e..f98a263c680 100644 --- a/aws_lambda_powertools/utilities/parser/models/event_bridge.py +++ b/aws_lambda_powertools/utilities/parser/models/event_bridge.py @@ -1,8 +1,10 @@ from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel, Field +from aws_lambda_powertools.utilities.parser.types import Model + class EventBridgeModel(BaseModel): version: str @@ -13,5 +15,5 @@ class EventBridgeModel(BaseModel): region: str resources: List[str] detail_type: str = Field(None, alias="detail-type") - detail: Dict[str, Any] + detail: Union[Dict[str, Any], Model] replay_name: Optional[str] = Field(None, alias="replay-name") diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis.py b/aws_lambda_powertools/utilities/parser/models/kinesis.py index 8979d3f102f..3817cda0852 100644 --- a/aws_lambda_powertools/utilities/parser/models/kinesis.py +++ b/aws_lambda_powertools/utilities/parser/models/kinesis.py @@ -1,12 +1,12 @@ import base64 import logging from binascii import Error as BinAsciiError -from typing import List +from typing import List, Union from pydantic import BaseModel, validator from pydantic.types import PositiveInt -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model logger = logging.getLogger(__name__) @@ -15,7 +15,7 @@ class KinesisDataStreamRecordPayload(BaseModel): kinesisSchemaVersion: str partitionKey: str sequenceNumber: PositiveInt - data: bytes # base64 encoded str is parsed into bytes + data: 
Union[bytes, Model] # base64 encoded str is parsed into bytes approximateArrivalTimestamp: float @validator("data", pre=True, allow_reuse=True) diff --git a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py index 4ec6a717f58..ae06c9f889a 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3.py +++ b/aws_lambda_powertools/utilities/parser/models/s3.py @@ -6,7 +6,7 @@ from pydantic.networks import IPvAnyNetwork from pydantic.types import NonNegativeFloat -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal class S3EventRecordGlacierRestoreEventData(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py index 1fc10672746..778786bc8cb 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py +++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py @@ -1,7 +1,9 @@ -from typing import Dict, Optional +from typing import Dict, Optional, Union from pydantic import BaseModel, HttpUrl +from aws_lambda_powertools.utilities.parser.types import Model + class S3ObjectContext(BaseModel): inputS3Url: HttpUrl @@ -12,7 +14,7 @@ class S3ObjectContext(BaseModel): class S3ObjectConfiguration(BaseModel): accessPointArn: str supportingAccessPointArn: str - payload: str + payload: Union[str, Model] class S3ObjectUserRequest(BaseModel): diff --git a/aws_lambda_powertools/utilities/parser/models/sns.py b/aws_lambda_powertools/utilities/parser/models/sns.py index 856757c5464..cdcd9549a98 100644 --- a/aws_lambda_powertools/utilities/parser/models/sns.py +++ b/aws_lambda_powertools/utilities/parser/models/sns.py @@ -1,10 +1,10 @@ from datetime import datetime -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Union from pydantic import BaseModel, root_validator from pydantic.networks import HttpUrl -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model class SnsMsgAttributeModel(BaseModel): @@ -18,7 +18,7 @@ class SnsNotificationModel(BaseModel): UnsubscribeUrl: HttpUrl Type: Literal["Notification"] MessageAttributes: Optional[Dict[str, SnsMsgAttributeModel]] - Message: str + Message: Union[str, Model] MessageId: str SigningCertUrl: HttpUrl Signature: str diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py index 184b1734f02..47871ab8840 100644 --- a/aws_lambda_powertools/utilities/parser/models/sqs.py +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -1,9 +1,9 @@ from datetime import datetime -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Union from pydantic import BaseModel -from ..types import Literal +from aws_lambda_powertools.utilities.parser.types import Literal, Model class SqsAttributesModel(BaseModel): @@ -52,7 +52,7 @@ class SqsMsgAttributeModel(BaseModel): class SqsRecordModel(BaseModel): messageId: str receiptHandle: str - body: str + body: Union[str, Model] attributes: SqsAttributesModel messageAttributes: Dict[str, SqsMsgAttributeModel] md5OfBody: str diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 7c9af95896f..f32f9ba3408 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -170,6 +170,8 @@ Parser comes with the following built-in models: You can extend them to include your own models, and yet have all other known fields parsed along 
the way. +!!! tip "For Mypy users, we only allow type override for fields where payload is injected e.g. `detail`, `body`, etc." + **EventBridge example** === "extending_builtin_models.py" From 8406c9b37057f3e92f244270f64988b8e84fd64c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Dec 2021 11:02:45 +0000 Subject: [PATCH 25/36] chore(deps): bump fastjsonschema from 2.15.1 to 2.15.2 (#891) Bumps [fastjsonschema](https://github.com/seznam/python-fastjsonschema) from 2.15.1 to 2.15.2.
Changelog

Sourced from fastjsonschema's changelog.

=== 2.15.2 (2021-12-12)

  • Fix nested oneOf and anyOf
  • Fix if-else block in anyOf block
  • Fix uniqueItems including lists or dicts
  • Add option to pass custom formats in generated code
Commits
  • ce8603b v2.15.2
  • 8464e1c Merge pull request #112 from hh-h/fix-107
  • aec5afd Fix if-else in onyOf block
  • cd1f06e Merge pull request #126 from abravalheri/add-custom-formats-arg
  • d4a5eb2 Fix nested oneOf and anyOf
  • 7e01c7b Add custom_formats parameter when validating $ref
  • 1a54972 Add 'custom_formats' argument to generated code
  • b5b7cba resolving uniqueItems is stable now
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=fastjsonschema&package-manager=pip&previous-version=2.15.1&new-version=2.15.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) ---
--- poetry.lock | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index a003d0fb93b..6a6ec29f26c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -205,7 +205,7 @@ python-versions = "*" [[package]] name = "fastjsonschema" -version = "2.15.1" +version = "2.15.2" description = "Fastest Python implementation of JSON schema" category = "main" optional = false @@ -1168,8 +1168,8 @@ eradicate = [ {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, ] fastjsonschema = [ - {file = "fastjsonschema-2.15.1-py3-none-any.whl", hash = "sha256:fa2f4bb1e31419c5eb1150f2e0545921712c10c34165b86d33f08f5562ad4b85"}, - {file = "fastjsonschema-2.15.1.tar.gz", hash = "sha256:671f36d225b3493629b5e789428660109528f373cf4b8a22bac6fa2f8191c2d2"}, + {file = "fastjsonschema-2.15.2-py3-none-any.whl", hash = "sha256:5fb095151a88b166e6cda6f527ce83775bf24b3d13e1adb67c690300f3fab0a1"}, + {file = "fastjsonschema-2.15.2.tar.gz", hash = "sha256:2a84755f22eb4c944c83c18d8f40705612376d178d2a5dfe50b0ecca3c11d610"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -1529,6 +1529,10 @@ requests = [ {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, ] "ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, From e91932caffc96a3accb9c6eb2647c73381f218e0 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Wed, 15 Dec 2021 21:46:27 -0800 Subject: [PATCH 26/36] fix(event-sources): Pass authorizer data to APIGatewayEventAuthorizer (#897) --- .../data_classes/api_gateway_proxy_event.py | 17 ++++++++++++++--- .../utilities/data_classes/common.py | 4 ++-- .../events/apiGatewayProxyEventPrincipalId.json | 13 +++++++++++++ tests/functional/test_data_classes.py | 14 ++++++++++++++ 4 files changed, 43 insertions(+), 5 deletions(-) create mode 100644 tests/events/apiGatewayProxyEventPrincipalId.json diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index 34ac8d83993..adce2d4b11b 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -11,11 +11,22 @@ class APIGatewayEventAuthorizer(DictWrapper): @property def claims(self) -> 
Optional[Dict[str, Any]]: - return self["requestContext"]["authorizer"].get("claims") + return self.get("claims") @property def scopes(self) -> Optional[List[str]]: - return self["requestContext"]["authorizer"].get("scopes") + return self.get("scopes") + + @property + def principal_id(self) -> Optional[str]: + """The principal user identification associated with the token sent by the client and returned from an + API Gateway Lambda authorizer (formerly known as a custom authorizer)""" + return self.get("principalId") + + @property + def integration_latency(self) -> Optional[int]: + """The authorizer latency in ms.""" + return self.get("integrationLatency") class APIGatewayEventRequestContext(BaseRequestContext): @@ -56,7 +67,7 @@ def route_key(self) -> Optional[str]: @property def authorizer(self) -> APIGatewayEventAuthorizer: - return APIGatewayEventAuthorizer(self._data) + return APIGatewayEventAuthorizer(self._data["requestContext"]["authorizer"]) class APIGatewayProxyEvent(BaseProxyEvent): diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index 566e1c56259..f209fc8c192 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -18,8 +18,8 @@ def __eq__(self, other: Any) -> bool: return self._data == other._data - def get(self, key: str) -> Optional[Any]: - return self._data.get(key) + def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]: + return self._data.get(key, default) @property def raw_event(self) -> Dict[str, Any]: diff --git a/tests/events/apiGatewayProxyEventPrincipalId.json b/tests/events/apiGatewayProxyEventPrincipalId.json new file mode 100644 index 00000000000..f18a2a44bbd --- /dev/null +++ b/tests/events/apiGatewayProxyEventPrincipalId.json @@ -0,0 +1,13 @@ +{ + "resource": "/trip", + "path": "/trip", + "httpMethod": "POST", + "requestContext": { + "requestId": "34972478-2843-4ced-a657-253108738274", + "authorizer": { + "user_id": "fake_username", + "principalId": "fake", + "integrationLatency": 451 + } + } +} diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index ded32639233..7a211ec2e01 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -897,6 +897,20 @@ def test_api_gateway_proxy_event(): assert request_context.identity.client_cert.subject_dn == "www.example.com" +def test_api_gateway_proxy_event_with_principal_id(): + event = APIGatewayProxyEvent(load_event("apiGatewayProxyEventPrincipalId.json")) + + request_context = event.request_context + authorizer = request_context.authorizer + assert authorizer.claims is None + assert authorizer.scopes is None + assert authorizer["principalId"] == "fake" + assert authorizer.get("principalId") == "fake" + assert authorizer.principal_id == "fake" + assert authorizer.integration_latency == 451 + assert authorizer.get("integrationStatus", "failed") == "failed" + + def test_api_gateway_proxy_v2_event(): event = APIGatewayProxyEventV2(load_event("apiGatewayProxyV2Event.json")) From 8c859deb9cf17bc2b334253ce7bc51dfe0a38c3e Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Thu, 16 Dec 2021 08:00:09 -0800 Subject: [PATCH 27/36] feat(apigateway): add exception_handler support (#898) --- .../event_handler/api_gateway.py | 63 ++++++++++++---- .../event_handler/test_api_gateway.py | 75 ++++++++++++++++++- 2 files changed, 123 insertions(+), 15 deletions(-) diff --git 
a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index b3d77df24b4..5bd3bc0b70e 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -10,10 +10,10 @@ from enum import Enum from functools import partial from http import HTTPStatus -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.exceptions import ServiceError +from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError from aws_lambda_powertools.shared import constants from aws_lambda_powertools.shared.functions import resolve_truthy_env_var_choice from aws_lambda_powertools.shared.json_encoder import Encoder @@ -27,7 +27,6 @@ _SAFE_URI = "-._~()'!*:@,;" # https://www.ietf.org/rfc/rfc3986.txt # API GW/ALB decode non-safe URI chars; we must support them too _UNSAFE_URI = "%<>\[\]{}|^" # noqa: W605 - _NAMED_GROUP_BOUNDARY_PATTERN = fr"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" @@ -435,6 +434,7 @@ def __init__( self._proxy_type = proxy_type self._routes: List[Route] = [] self._route_keys: List[str] = [] + self._exception_handlers: Dict[Type, Callable] = {} self._cors = cors self._cors_enabled: bool = cors is not None self._cors_methods: Set[str] = {"OPTIONS"} @@ -596,6 +596,10 @@ def _not_found(self, method: str) -> ResponseBuilder: headers["Access-Control-Allow-Methods"] = ",".join(sorted(self._cors_methods)) return ResponseBuilder(Response(status_code=204, content_type=None, headers=headers, body=None)) + handler = self._lookup_exception_handler(NotFoundError) + if handler: + return ResponseBuilder(handler(NotFoundError())) + return ResponseBuilder( Response( status_code=HTTPStatus.NOT_FOUND.value, @@ -609,16 +613,11 @@ def _call_route(self, route: Route, args: Dict[str, str]) -> ResponseBuilder: """Actually call the matching route with any provided keyword arguments.""" try: return ResponseBuilder(self._to_response(route.func(**args)), route) - except ServiceError as e: - return ResponseBuilder( - Response( - status_code=e.status_code, - content_type=content_types.APPLICATION_JSON, - body=self._json_dump({"statusCode": e.status_code, "message": e.msg}), - ), - route, - ) - except Exception: + except Exception as exc: + response_builder = self._call_exception_handler(exc, route) + if response_builder: + return response_builder + if self._debug: # If the user has turned on debug mode, # we'll let the original exception propagate so @@ -628,10 +627,46 @@ def _call_route(self, route: Route, args: Dict[str, str]) -> ResponseBuilder: status_code=500, content_type=content_types.TEXT_PLAIN, body="".join(traceback.format_exc()), - ) + ), + route, ) + raise + def not_found(self, func: Callable): + return self.exception_handler(NotFoundError)(func) + + def exception_handler(self, exc_class: Type[Exception]): + def register_exception_handler(func: Callable): + self._exception_handlers[exc_class] = func + + return register_exception_handler + + def _lookup_exception_handler(self, exp_type: Type) -> Optional[Callable]: + # Use "Method Resolution Order" to allow for matching against a base class + # of an exception + for cls in exp_type.__mro__: + if cls in self._exception_handlers: + return self._exception_handlers[cls] + return None + + def _call_exception_handler(self, exp: Exception, route: Route) 
-> Optional[ResponseBuilder]: + handler = self._lookup_exception_handler(type(exp)) + if handler: + return ResponseBuilder(handler(exp), route) + + if isinstance(exp, ServiceError): + return ResponseBuilder( + Response( + status_code=exp.status_code, + content_type=content_types.APPLICATION_JSON, + body=self._json_dump({"statusCode": exp.status_code, "message": exp.msg}), + ), + route, + ) + + return None + def _to_response(self, result: Union[Dict, Response]) -> Response: """Convert the route's result to a Response diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index f28752e6de6..45b1e3f41a4 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -163,7 +163,7 @@ def patch_func(): def handler(event, context): return app.resolve(event, context) - # Also check check the route configurations + # Also check the route configurations routes = app._routes assert len(routes) == 5 for route in routes: @@ -1076,3 +1076,76 @@ def foo(): assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + + +def test_exception_handler(): + # GIVEN a resolver with an exception handler defined for ValueError + app = ApiGatewayResolver() + + @app.exception_handler(ValueError) + def handle_value_error(ex: ValueError): + print(f"request path is '{app.current_event.path}'") + return Response( + status_code=418, + content_type=content_types.TEXT_HTML, + body=str(ex), + ) + + @app.get("/my/path") + def get_lambda() -> Response: + raise ValueError("Foo!") + + # WHEN calling the event handler + # AND a ValueError is raised + result = app(LOAD_GW_EVENT, {}) + + # THEN call the exception_handler + assert result["statusCode"] == 418 + assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["body"] == "Foo!" + + +def test_exception_handler_service_error(): + # GIVEN + app = ApiGatewayResolver() + + @app.exception_handler(ServiceError) + def service_error(ex: ServiceError): + print(ex.msg) + return Response( + status_code=ex.status_code, + content_type=content_types.APPLICATION_JSON, + body="CUSTOM ERROR FORMAT", + ) + + @app.get("/my/path") + def get_lambda() -> Response: + raise InternalServerError("Something sensitive") + + # WHEN calling the event handler + # AND a ServiceError is raised + result = app(LOAD_GW_EVENT, {}) + + # THEN call the exception_handler + assert result["statusCode"] == 500 + assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["body"] == "CUSTOM ERROR FORMAT" + + +def test_exception_handler_not_found(): + # GIVEN a resolver with an exception handler defined for a 404 not found + app = ApiGatewayResolver() + + @app.not_found + def handle_not_found(exc: NotFoundError) -> Response: + assert isinstance(exc, NotFoundError) + return Response(status_code=404, content_type=content_types.TEXT_PLAIN, body="I am a teapot!") + + # WHEN calling the event handler + # AND no route is found + result = app(LOAD_GW_EVENT, {}) + + # THEN call the exception_handler + assert result["statusCode"] == 404 + assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert result["body"] == "I am a teapot!"
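Taken together, the new `exception_handler` and `not_found` hooks in PATCH 27 can be wired up as in the following minimal sketch. This is an illustration distilled from the tests above, not part of the commit; the route path, status codes, messages, and handler names are arbitrary:

```python
from aws_lambda_powertools.event_handler import content_types
from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response
from aws_lambda_powertools.event_handler.exceptions import NotFoundError

app = ApiGatewayResolver()


@app.exception_handler(ValueError)
def handle_value_error(ex: ValueError):
    # Called for ValueError and, via the __mro__ lookup in _lookup_exception_handler,
    # for any of its subclasses raised inside a route
    return Response(status_code=400, content_type=content_types.TEXT_PLAIN, body=str(ex))


@app.not_found
def handle_not_found(exc: NotFoundError) -> Response:
    # Called when no registered route matches the request method/path
    return Response(status_code=404, content_type=content_types.TEXT_PLAIN, body="Resource not found")


@app.get("/orders")
def get_orders():
    raise ValueError("invalid order id")  # resolved to an HTTP 400 by handle_value_error


def lambda_handler(event, context):
    return app.resolve(event, context)
```

Exception types without a registered handler keep the existing behaviour: `ServiceError` still produces its JSON error shape, and anything else re-raises (or returns a stack trace when debug mode is on).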
From 99227ce6cbcd201f9da0e2140dab5748e3468dbe Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 17 Dec 2021 15:20:20 +0100 Subject: [PATCH 28/36] fix(parser): kinesis sequence number is str, not int (#907) --- aws_lambda_powertools/utilities/parser/models/kinesis.py | 3 +-- tests/functional/parser/test_kinesis.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis.py b/aws_lambda_powertools/utilities/parser/models/kinesis.py index 3817cda0852..1c7c31c97b4 100644 --- a/aws_lambda_powertools/utilities/parser/models/kinesis.py +++ b/aws_lambda_powertools/utilities/parser/models/kinesis.py @@ -4,7 +4,6 @@ from typing import List, Union from pydantic import BaseModel, validator -from pydantic.types import PositiveInt from aws_lambda_powertools.utilities.parser.types import Literal, Model @@ -14,7 +13,7 @@ class KinesisDataStreamRecordPayload(BaseModel): kinesisSchemaVersion: str partitionKey: str - sequenceNumber: PositiveInt + sequenceNumber: str data: Union[bytes, Model] # base64 encoded str is parsed into bytes approximateArrivalTimestamp: float diff --git a/tests/functional/parser/test_kinesis.py b/tests/functional/parser/test_kinesis.py index 632a7463805..552cb6cef68 100644 --- a/tests/functional/parser/test_kinesis.py +++ b/tests/functional/parser/test_kinesis.py @@ -35,7 +35,7 @@ def handle_kinesis_no_envelope(event: KinesisDataStreamModel, _: LambdaContext): assert kinesis.approximateArrivalTimestamp == 1545084650.987 assert kinesis.kinesisSchemaVersion == "1.0" assert kinesis.partitionKey == "1" - assert kinesis.sequenceNumber == 49590338271490256608559692538361571095921575989136588898 + assert kinesis.sequenceNumber == "49590338271490256608559692538361571095921575989136588898" assert kinesis.data == b"Hello, this is a test." 
From 5362a164e00276d136a4f73f1ce8936b5b6d34b7 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 17 Dec 2021 22:59:37 +0100 Subject: [PATCH 29/36] fix(parser): overload parse when using envelope (#885) --- .../utilities/parser/envelopes/base.py | 2 +- .../utilities/parser/parser.py | 26 ++++++++++++------- .../utilities/parser/types.py | 2 ++ 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/base.py b/aws_lambda_powertools/utilities/parser/envelopes/base.py index 06e78160d87..85486fdd876 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/base.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/base.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from typing import Any, Dict, Optional, Type, TypeVar, Union -from ..types import Model +from aws_lambda_powertools.utilities.parser.types import Model logger = logging.getLogger(__name__) diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py index 7a7f6aec56b..a12f163f8a6 100644 --- a/aws_lambda_powertools/utilities/parser/parser.py +++ b/aws_lambda_powertools/utilities/parser/parser.py @@ -1,25 +1,23 @@ import logging -from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union +from typing import Any, Callable, Dict, Optional, Type, overload + +from aws_lambda_powertools.utilities.parser.types import EnvelopeModel, EventParserReturnType, Model from ...middleware_factory import lambda_handler_decorator from ..typing import LambdaContext from .envelopes.base import Envelope from .exceptions import InvalidEnvelopeError, InvalidModelTypeError -from .types import Model logger = logging.getLogger(__name__) -EventParserReturnType = TypeVar("EventParserReturnType") - - @lambda_handler_decorator def event_parser( handler: Callable[[Any, LambdaContext], EventParserReturnType], event: Dict[str, Any], context: LambdaContext, model: Type[Model], - envelope: Optional[Union[Envelope, Type[Envelope]]] = None, + envelope: Optional[Type[Envelope]] = None, ) -> EventParserReturnType: """Lambda handler decorator to parse & validate events using Pydantic models @@ -81,14 +79,22 @@ def handler(event: Order, context: LambdaContext): InvalidEnvelopeError When envelope given does not implement BaseEnvelope """ - parsed_event = parse(event=event, model=model, envelope=envelope) + parsed_event = parse(event=event, model=model, envelope=envelope) if envelope else parse(event=event, model=model) logger.debug(f"Calling handler {handler.__name__}") return handler(parsed_event, context) -def parse( - event: Dict[str, Any], model: Type[Model], envelope: Optional[Union[Envelope, Type[Envelope]]] = None -) -> Model: +@overload +def parse(event: Dict[str, Any], model: Type[Model]) -> Model: + ... + + +@overload +def parse(event: Dict[str, Any], model: Type[Model], envelope: Type[Envelope]) -> EnvelopeModel: + ... + + +def parse(event: Dict[str, Any], model: Type[Model], envelope: Optional[Type[Envelope]] = None): """Standalone function to parse & validate events using Pydantic models Typically used when you need fine-grained control over error handling compared to event_parser decorator. 
diff --git a/aws_lambda_powertools/utilities/parser/types.py b/aws_lambda_powertools/utilities/parser/types.py index 2565e52c764..20958bd9c21 100644 --- a/aws_lambda_powertools/utilities/parser/types.py +++ b/aws_lambda_powertools/utilities/parser/types.py @@ -12,3 +12,5 @@ from typing_extensions import Literal # noqa: F401 Model = TypeVar("Model", bound=BaseModel) +EnvelopeModel = TypeVar("EnvelopeModel") +EventParserReturnType = TypeVar("EventParserReturnType") From c74811b7174c0e9af306ec624fb2d8adbe1f3648 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sun, 19 Dec 2021 19:50:57 +0100 Subject: [PATCH 30/36] feat(batch): new BatchProcessor for SQS, DynamoDB, Kinesis (#886) --- .pre-commit-config.yaml | 4 - .../utilities/batch/__init__.py | 24 +- aws_lambda_powertools/utilities/batch/base.py | 320 +++- .../utilities/batch/exceptions.py | 34 +- docs/utilities/batch.md | 1346 +++++++++++++++-- tests/functional/test_utilities_batch.py | 552 ++++++- tests/functional/utils.py | 9 + tests/utils.py | 0 8 files changed, 2129 insertions(+), 160 deletions(-) create mode 100644 tests/utils.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f42337d5c5b..61e98378017 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,10 +11,6 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer - id: check-toml - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.5.1 - hooks: - - id: python-use-type-annotations - repo: local hooks: - id: black diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py index d308a56abda..584342e5fd0 100644 --- a/aws_lambda_powertools/utilities/batch/__init__.py +++ b/aws_lambda_powertools/utilities/batch/__init__.py @@ -4,7 +4,25 @@ Batch processing utility """ -from .base import BasePartialProcessor, batch_processor -from .sqs import PartialSQSProcessor, sqs_batch_processor +from aws_lambda_powertools.utilities.batch.base import ( + BasePartialProcessor, + BatchProcessor, + EventType, + ExceptionInfo, + FailureResponse, + SuccessResponse, + batch_processor, +) +from aws_lambda_powertools.utilities.batch.sqs import PartialSQSProcessor, sqs_batch_processor -__all__ = ("BasePartialProcessor", "PartialSQSProcessor", "batch_processor", "sqs_batch_processor") +__all__ = ( + "BatchProcessor", + "BasePartialProcessor", + "ExceptionInfo", + "EventType", + "FailureResponse", + "PartialSQSProcessor", + "SuccessResponse", + "batch_processor", + "sqs_batch_processor", +) diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index a0ad18a9ec1..02eb00ffaed 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -3,24 +3,64 @@ """ Batch processing utilities """ - +import copy import logging +import sys from abc import ABC, abstractmethod -from typing import Any, Callable, Dict, List, Tuple +from enum import Enum +from types import TracebackType +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union, overload from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord logger = 
logging.getLogger(__name__) +class EventType(Enum): + SQS = "SQS" + KinesisDataStreams = "KinesisDataStreams" + DynamoDBStreams = "DynamoDBStreams" + + +# +# type specifics +# +has_pydantic = "pydantic" in sys.modules +ExceptionInfo = Tuple[Type[BaseException], BaseException, TracebackType] +OptExcInfo = Union[ExceptionInfo, Tuple[None, None, None]] + +# For IntelliSense and Mypy to work, we need to account for possible SQS, Kinesis and DynamoDB subclasses +# We need them as subclasses as we must access their message ID or sequence number metadata via dot notation +if has_pydantic: + from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamRecordModel + from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord as KinesisDataStreamRecordModel + from aws_lambda_powertools.utilities.parser.models import SqsRecordModel + + BatchTypeModels = Optional[ + Union[Type[SqsRecordModel], Type[DynamoDBStreamRecordModel], Type[KinesisDataStreamRecordModel]] + ] + +# When using processor with default arguments, records will carry EventSourceDataClassTypes +# and depending on what EventType it's passed it'll correctly map to the right record +# When using Pydantic Models, it'll accept any subclass from SQS, DynamoDB and Kinesis +EventSourceDataClassTypes = Union[SQSRecord, KinesisStreamRecord, DynamoDBRecord] +BatchEventTypes = Union[EventSourceDataClassTypes, "BatchTypeModels"] +SuccessResponse = Tuple[str, Any, BatchEventTypes] +FailureResponse = Tuple[str, str, BatchEventTypes] + + class BasePartialProcessor(ABC): """ Abstract class for batch processors. """ def __init__(self): - self.success_messages: List = [] - self.fail_messages: List = [] + self.success_messages: List[BatchEventTypes] = [] + self.fail_messages: List[BatchEventTypes] = [] self.exceptions: List = [] @abstractmethod @@ -38,7 +78,7 @@ def _clean(self): raise NotImplementedError() @abstractmethod - def _process_record(self, record: Any): + def _process_record(self, record: dict): """ Process record with handler. """ @@ -57,13 +97,13 @@ def __enter__(self): def __exit__(self, exception_type, exception_value, traceback): self._clean() - def __call__(self, records: List[Any], handler: Callable): + def __call__(self, records: List[dict], handler: Callable): """ Set instance attributes before execution Parameters ---------- - records: List[Any] + records: List[dict] List with objects to be processed. handler: Callable Callable to process "records" entries. 
@@ -72,26 +112,40 @@ def __call__(self, records: List[dict], handler: Callable):
         self.handler = handler
         return self
 
-    def success_handler(self, record: Any, result: Any):
+    def success_handler(self, record, result: Any) -> SuccessResponse:
         """
-        Success callback
+        Keeps track of batch records that were processed successfully
+
+        Parameters
+        ----------
+        record: Any
+            record that succeeded processing
+        result: Any
+            result from record handler
 
         Returns
         -------
-        tuple
+        SuccessResponse
             "success", result, original record
         """
         entry = ("success", result, record)
         self.success_messages.append(record)
         return entry
 
-    def failure_handler(self, record: Any, exception: Tuple):
+    def failure_handler(self, record, exception: OptExcInfo) -> FailureResponse:
         """
-        Failure callback
+        Keeps track of batch records that failed processing
+
+        Parameters
+        ----------
+        record: Any
+            record that failed processing
+        exception: OptExcInfo
+            Exception information containing type, value, and traceback (sys.exc_info())
 
         Returns
         -------
-        tuple
+        FailureResponse
             "fail", exception details, original record
         """
         exception_string = f"{exception[0]}:{exception[1]}"
@@ -146,3 +200,243 @@ def batch_processor(
     processor.process()
 
     return handler(event, context)
+
+
+class BatchProcessor(BasePartialProcessor):
+    """Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB.
+
+
+    Example
+    -------
+
+    ## Process batch triggered by SQS
+
+    ```python
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+    ## Process batch triggered by Kinesis Data Streams
+
+    ```python
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.KinesisDataStreams)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: KinesisStreamRecord):
+        logger.info(record.kinesis.data_as_text)
+        payload: dict = record.kinesis.data_as_json()
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+
+    ## Process batch triggered by DynamoDB Data Streams
+
+    ```python
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.DynamoDBStreams)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: DynamoDBRecord):
+        logger.info(record.dynamodb.new_image)
+        payload: dict = json.loads(record.dynamodb.new_image.get("item").s_value)
+        # alternatively:
+        # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image  # noqa: E800
+        # payload = change.get("Message").raw_event -> {"S": ""}
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context: LambdaContext):
+        batch = event["Records"]
+        with processor(records=batch, handler=record_handler):
+            processed_messages = processor.process()  # kick off processing, return list[tuple]
+
+        return processor.response()
+    ```
+
+
+    Raises
+    ------
+    BatchProcessingError
+        When all batch records fail processing
+    """
+
+    DEFAULT_RESPONSE: Dict[str, List[Optional[dict]]] = {"batchItemFailures": []}
+
+    def __init__(self, event_type: EventType, model: Optional["BatchTypeModels"] = None):
+        """Process batch and partially report failed items
+
+        Parameters
+        ----------
+        event_type: EventType
+            Whether this is a SQS, DynamoDB Streams, or Kinesis Data Stream event
+        model: Optional["BatchTypeModels"]
+            Parser's data model using either SqsRecordModel, DynamoDBStreamRecordModel, KinesisDataStreamRecord
+
+        Raises
+        ------
+        BatchProcessingError
+            Raised when the entire batch has failed processing
+        """
+        self.event_type = event_type
+        self.model = model
+        self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE)
+        self._COLLECTOR_MAPPING = {
+            EventType.SQS: self._collect_sqs_failures,
+            EventType.KinesisDataStreams: self._collect_kinesis_failures,
+            EventType.DynamoDBStreams: self._collect_dynamodb_failures,
+        }
+        self._DATA_CLASS_MAPPING = {
+            EventType.SQS: SQSRecord,
+            EventType.KinesisDataStreams: KinesisStreamRecord,
+            EventType.DynamoDBStreams: DynamoDBRecord,
+        }
+
+        super().__init__()
+
+    def response(self):
+        """Batch items that failed processing, if any"""
+        return self.batch_response
+
+    def _prepare(self):
+        """
+        Remove results from previous execution.
+        """
+        self.success_messages.clear()
+        self.fail_messages.clear()
+        self.batch_response = copy.deepcopy(self.DEFAULT_RESPONSE)
+
+    def _process_record(self, record: dict) -> Union[SuccessResponse, FailureResponse]:
+        """
+        Process a record with instance's handler
+
+        Parameters
+        ----------
+        record: dict
+            A batch record to be processed.
+        """
+        data = self._to_batch_type(record=record, event_type=self.event_type, model=self.model)
+        try:
+            result = self.handler(record=data)
+            return self.success_handler(record=record, result=result)
+        except Exception:
+            return self.failure_handler(record=data, exception=sys.exc_info())
+
+    def _clean(self):
+        """
+        Report messages to be deleted in case of partial failure.
+        """
+
+        if not self._has_messages_to_report():
+            return
+
+        if self._entire_batch_failed():
+            raise BatchProcessingError(
+                msg=f"All records failed processing. {len(self.exceptions)} individual errors logged "
+                f"separately below.",
+                child_exceptions=self.exceptions,
+            )
+
+        messages = self._get_messages_to_report()
+        self.batch_response = {"batchItemFailures": messages}
+
+    def _has_messages_to_report(self) -> bool:
+        if self.fail_messages:
+            return True
+
+        logger.debug(f"All {len(self.success_messages)} records successfully processed")
+        return False
+
+    def _entire_batch_failed(self) -> bool:
+        return len(self.exceptions) == len(self.records)
+
+    def _get_messages_to_report(self) -> List[Dict[str, str]]:
+        """
+        Format messages to use in batch deletion
+        """
+        return self._COLLECTOR_MAPPING[self.event_type]()
+
+    # Event Source Data Classes follow python idioms for fields
+    # while Parser/Pydantic follows the event field names to the letter
+    def _collect_sqs_failures(self):
+        if self.model:
+            return [{"itemIdentifier": msg.messageId} for msg in self.fail_messages]
+        else:
+            return [{"itemIdentifier": msg.message_id} for msg in self.fail_messages]
+
+    def _collect_kinesis_failures(self):
+        if self.model:
+            return [{"itemIdentifier": msg.kinesis.sequenceNumber} for msg in self.fail_messages]
+        else:
+            return [{"itemIdentifier": msg.kinesis.sequence_number} for msg in self.fail_messages]
+
+    def _collect_dynamodb_failures(self):
+        if self.model:
+            return [{"itemIdentifier": msg.dynamodb.SequenceNumber} for msg in self.fail_messages]
+        else:
+            return [{"itemIdentifier": msg.dynamodb.sequence_number} for msg in self.fail_messages]
+
+    @overload
+    def _to_batch_type(self, record: dict, event_type: EventType, model: "BatchTypeModels") -> "BatchTypeModels":
+        ...
+
+    @overload
+    def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceDataClassTypes:
+        ...
+
+    def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None):
+        if model is not None:
+            return model.parse_obj(record)
+        else:
+            return self._DATA_CLASS_MAPPING[event_type](record)
diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py
index c2ead04a7b1..fe51433a5d6 100644
--- a/aws_lambda_powertools/utilities/batch/exceptions.py
+++ b/aws_lambda_powertools/utilities/batch/exceptions.py
@@ -2,20 +2,16 @@
 Batch processing exceptions
 """
 import traceback
+from typing import Optional, Tuple
 
 
-class SQSBatchProcessingError(Exception):
-    """When at least one message within a batch could not be processed"""
-
+class BaseBatchProcessingError(Exception):
     def __init__(self, msg="", child_exceptions=()):
         super().__init__(msg)
         self.msg = msg
         self.child_exceptions = child_exceptions
 
-    # Overriding this method so we can output all child exception tracebacks when we raise this exception to prevent
-    # errors being lost.
See https://github.com/awslabs/aws-lambda-powertools-python/issues/275
-    def __str__(self):
-        parent_exception_str = super(SQSBatchProcessingError, self).__str__()
+    def format_exceptions(self, parent_exception_str):
         exception_list = [f"{parent_exception_str}\n"]
         for exception in self.child_exceptions:
             extype, ex, tb = exception
@@ -23,3 +19,27 @@
             exception_list.append(formatted)
 
         return "\n".join(exception_list)
+
+
+class SQSBatchProcessingError(BaseBatchProcessingError):
+    """When at least one message within a batch could not be processed"""
+
+    def __init__(self, msg="", child_exceptions: Optional[Tuple[Exception]] = None):
+        super().__init__(msg, child_exceptions)
+
+    # Overriding this method so we can output all child exception tracebacks when we raise this exception to prevent
+    # errors being lost. See https://github.com/awslabs/aws-lambda-powertools-python/issues/275
+    def __str__(self):
+        parent_exception_str = super(SQSBatchProcessingError, self).__str__()
+        return self.format_exceptions(parent_exception_str)
+
+
+class BatchProcessingError(BaseBatchProcessingError):
+    """When all batch records failed to be processed"""
+
+    def __init__(self, msg="", child_exceptions: Optional[Tuple[Exception]] = None):
+        super().__init__(msg, child_exceptions)
+
+    def __str__(self):
+        parent_exception_str = super(BatchProcessingError, self).__str__()
+        return self.format_exceptions(parent_exception_str)
diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index 56ab160e9f9..3ea9413749e 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -1,33 +1,642 @@
 ---
-title: SQS Batch Processing
+title: Batch Processing
 description: Utility
 ---
 
-The SQS batch processing utility provides a way to handle partial failures when processing batches of messages from SQS.
+The batch processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.
 
 ## Key Features
 
-* Prevent successfully processed messages being returned to SQS
-* Simple interface for individually processing messages from a batch
-* Build your own batch processor using the base classes
+* Reports batch item failures to reduce the number of retries for a record upon errors
+* Simple interface to process each batch record
+* Integrates with [Event Source Data Classes](./data_classes.md){target="_blank"} and [Parser (Pydantic)](parser.md){target="_blank"} for self-documenting record schema
+* Build your own batch processor by extending primitives
 
 ## Background
 
-When using SQS as a Lambda event source mapping, Lambda functions are triggered with a batch of messages from SQS.
+When using SQS, Kinesis Data Streams, or DynamoDB Streams as a Lambda event source, your Lambda functions are triggered with a batch of messages.
 
-If your function fails to process any message from the batch, the entire batch returns to your SQS queue, and your Lambda function is triggered with the same batch one more time.
+If your function fails to process any message from the batch, the entire batch returns to your queue or stream. This same batch is then retried until one of these conditions happens first: **a)** your Lambda function returns a successful response, **b)** a record reaches its maximum retry attempts, or **c)** records expire.
 
-With this utility, messages within a batch are handled individually - only messages that were not successfully processed
-are returned to the queue.
+With this utility, batch records are processed individually – only messages that failed to be processed return to the queue or stream for a further retry. This works when two mechanisms are in place: -!!! warning - While this utility lowers the chance of processing messages more than once, it is not guaranteed. We recommend implementing processing logic in an idempotent manner wherever possible. +1. `ReportBatchItemFailures` is set in your SQS, Kinesis, or DynamoDB event source properties +2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses + +!!! warning "This utility lowers the chance of processing records more than once; it does not guarantee it" + We recommend implementing processing logic in an [idempotent manner](idempotency.md){target="_blank"} wherever possible. - More details on how Lambda works with SQS can be found in the [AWS documentation](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html) + You can find more details on how Lambda works with either [SQS](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html){target="_blank"}, [Kinesis](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html){target="_blank"}, or [DynamoDB](https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html){target="_blank"} in the AWS Documentation. ## Getting started -### IAM Permissions +Regardless whether you're using SQS, Kinesis Data Streams or DynamoDB Streams, you must configure your Lambda function event source to use ``ReportBatchItemFailures`. + +You do not need any additional IAM permissions to use this utility, except for what each event source requires. + +### Required resources + +The remaining sections of the documentation will rely on these samples. For completeness, this demonstrates IAM permissions and Dead Letter Queue where batch records will be sent after 2 retries were attempted. 
+ + +=== "SQS" + + ```yaml title="template.yaml" hl_lines="31-32" + AWSTemplateFormatVersion: '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: partial batch response sample + + Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.8 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + + Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + - SQSPollerPolicy: + QueueName: !GetAtt SampleQueue.QueueName + Events: + Batch: + Type: SQS + Properties: + Queue: !GetAtt SampleQueue.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleQueue: + Type: AWS::SQS::Queue + Properties: + VisibilityTimeout: 30 # Fn timeout * 6 + RedrivePolicy: + maxReceiveCount: 2 + deadLetterTargetArn: !GetAtt SampleDLQ.Arn + ``` + +=== "Kinesis Data Streams" + + ```yaml title="template.yaml" hl_lines="44-45" + AWSTemplateFormatVersion: '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: partial batch response sample + + Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.8 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + + Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations require additional permissions + # to send failure records to DLQ from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + KinesisStream: + Type: Kinesis + Properties: + Stream: !GetAtt SampleStream.Arn + BatchSize: 100 + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleStream: + Type: AWS::Kinesis::Stream + Properties: + ShardCount: 1 + ``` + +=== "DynamoDB Streams" + + ```yaml title="template.yaml" hl_lines="43-44" + AWSTemplateFormatVersion: '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: partial batch response sample + + Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.8 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + + Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations require additional permissions + # to send failure records from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + DynamoDBStream: + Type: DynamoDB + Properties: + Stream: !GetAtt SampleTable.StreamArn + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleTable: + Type: AWS::DynamoDB::Table + Properties: + BillingMode: PAY_PER_REQUEST + AttributeDefinitions: + - AttributeName: pk + AttributeType: S + - AttributeName: sk + AttributeType: S + KeySchema: + - AttributeName: pk + KeyType: HASH + - AttributeName: sk + KeyType: 
RANGE
+        SSESpecification:
+          SSEEnabled: yes
+        StreamSpecification:
+          StreamViewType: NEW_AND_OLD_IMAGES
+
+    ```
+
+### Processing messages from SQS
+
+Processing batches from SQS works in four stages:
+
+1. Instantiate **`BatchProcessor`** and choose **`EventType.SQS`** for the event type
+2. Define your function to handle each batch record, and use the [`SQSRecord`](data_classes.md#sqs){target="_blank"} type annotation for autocompletion
+3. Use either the **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing
+4. Return the appropriate response contract to Lambda via the **`.response()`** processor method
+
+!!! info "This code example optionally uses Tracer and Logger for completion"
+
+=== "As a decorator"
+
+    ```python hl_lines="4-5 9 15 23 25"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "As a context manager"
+
+    ```python hl_lines="4-5 9 15 24-26 28"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context: LambdaContext):
+        batch = event["Records"]
+        with processor(records=batch, handler=record_handler):
+            processed_messages = processor.process()  # kick off processing, return list[tuple]
+
+        return processor.response()
+    ```
+
+=== "Sample response"
+
+    The second record failed to be processed, therefore the processor added its message ID in the response.
+ + ```python + { + 'batchItemFailures': [ + { + 'itemIdentifier': '244fc6b4-87a3-44ab-83d2-361172410c3a' + } + ] + } + ``` + +=== "Sample event" + + ```json + { + "Records": [ + { + "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "{\"Message\": \"success\"}", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + }, + { + "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a", + "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", + "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185" + }, + "messageAttributes": {}, + "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-2: 123456789012:my-queue", + "awsRegion": "us-east-1" + } + ] + } + ``` + +### Processing messages from Kinesis + +Processing batches from Kinesis works in four stages: + +1. Instantiate **`BatchProcessor`** and choose **`EventType.KinesisDataStreams`** for the event type +2. Define your function to handle each batch record, and use [`KinesisStreamRecord`](data_classes.md#kinesis-streams){target="_blank"} type annotation for autocompletion +3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing +4. Return the appropriate response contract to Lambda via **`.response()`** processor method + +!!! info "This code example optionally uses Tracer and Logger for completion" + +=== "As a decorator" + + ```python hl_lines="4-5 9 15 22 24" + import json + + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord + from aws_lambda_powertools.utilities.typing import LambdaContext + + + processor = BatchProcessor(event_type=EventType.KinesisDataStreams) + tracer = Tracer() + logger = Logger() + + + @tracer.capture_method + def record_handler(record: KinesisStreamRecord): + logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + ... 
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "As a context manager"
+
+    ```python hl_lines="4-5 9 15 23-25 27"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.KinesisDataStreams)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: KinesisStreamRecord):
+        logger.info(record.kinesis.data_as_text)
+        payload: dict = record.kinesis.data_as_json()
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context: LambdaContext):
+        batch = event["Records"]
+        with processor(records=batch, handler=record_handler):
+            processed_messages = processor.process()  # kick off processing, return list[tuple]
+
+        return processor.response()
+    ```
+
+=== "Sample response"
+
+    The second record failed to be processed, therefore the processor added its sequence number in the response.
+
+    ```python
+    {
+        'batchItemFailures': [
+            {
+                'itemIdentifier': '6006958808509702859251049540584488075644979031228738'
+            }
+        ]
+    }
+    ```
+
+
+=== "Sample event"
+
+    ```json
+    {
+        "Records": [
+            {
+                "kinesis": {
+                    "kinesisSchemaVersion": "1.0",
+                    "partitionKey": "1",
+                    "sequenceNumber": "4107859083838847772757075850904226111829882106684065",
+                    "data": "eyJNZXNzYWdlIjogInN1Y2Nlc3MifQ==",
+                    "approximateArrivalTimestamp": 1545084650.987
+                },
+                "eventSource": "aws:kinesis",
+                "eventVersion": "1.0",
+                "eventID": "shardId-000000000006:4107859083838847772757075850904226111829882106684065",
+                "eventName": "aws:kinesis:record",
+                "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role",
+                "awsRegion": "us-east-2",
+                "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream"
+            },
+            {
+                "kinesis": {
+                    "kinesisSchemaVersion": "1.0",
+                    "partitionKey": "1",
+                    "sequenceNumber": "6006958808509702859251049540584488075644979031228738",
+                    "data": "c3VjY2Vzcw==",
+                    "approximateArrivalTimestamp": 1545084650.987
+                },
+                "eventSource": "aws:kinesis",
+                "eventVersion": "1.0",
+                "eventID": "shardId-000000000006:6006958808509702859251049540584488075644979031228738",
+                "eventName": "aws:kinesis:record",
+                "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role",
+                "awsRegion": "us-east-2",
+                "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream"
+            }
+        ]
+    }
+    ```
+
+
+### Processing messages from DynamoDB
+
+Processing batches from DynamoDB Streams works in four stages:
+
+1. Instantiate **`BatchProcessor`** and choose **`EventType.DynamoDBStreams`** for the event type
+2. Define your function to handle each batch record, and use the [`DynamoDBRecord`](data_classes.md#dynamodb-streams){target="_blank"} type annotation for autocompletion
+3. Use either the **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing
+4. Return the appropriate response contract to Lambda via the **`.response()`** processor method
+
+!!! info "This code example optionally uses Tracer and Logger for completion"
+
+=== "As a decorator"
+
+    ```python hl_lines="4-5 9 15 25 27"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.DynamoDBStreams)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: DynamoDBRecord):
+        logger.info(record.dynamodb.new_image)
+        payload: dict = json.loads(record.dynamodb.new_image.get("Message").get_value)
+        # alternatively:
+        # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image
+        # payload = change.get("Message").raw_event -> {"S": ""}
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "As a context manager"
+
+    ```python hl_lines="4-5 9 15 26-28 30"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.DynamoDBStreams)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: DynamoDBRecord):
+        logger.info(record.dynamodb.new_image)
+        payload: dict = json.loads(record.dynamodb.new_image.get("Message").get_value)
+        # alternatively:
+        # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image
+        # payload = change.get("Message").raw_event -> {"S": ""}
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context: LambdaContext):
+        batch = event["Records"]
+        with processor(records=batch, handler=record_handler):
+            processed_messages = processor.process()  # kick off processing, return list[tuple]
+
+        return processor.response()
+    ```
+
+=== "Sample response"
+
+    The second record failed to be processed, therefore the processor added its sequence number in the response.
+
+    ```python
+    {
+        'batchItemFailures': [
+            {
+                'itemIdentifier': '8640712661'
+            }
+        ]
+    }
+    ```
+
+
+=== "Sample event"
+
+    ```json
+    {
+        "Records": [
+            {
+                "eventID": "1",
+                "eventVersion": "1.0",
+                "dynamodb": {
+                    "Keys": {
+                        "Id": {
+                            "N": "101"
+                        }
+                    },
+                    "NewImage": {
+                        "Message": {
+                            "S": "failure"
+                        }
+                    },
+                    "StreamViewType": "NEW_AND_OLD_IMAGES",
+                    "SequenceNumber": "3275880929",
+                    "SizeBytes": 26
+                },
+                "awsRegion": "us-west-2",
+                "eventName": "INSERT",
+                "eventSourceARN": "eventsource_arn",
+                "eventSource": "aws:dynamodb"
+            },
+            {
+                "eventID": "1",
+                "eventVersion": "1.0",
+                "dynamodb": {
+                    "Keys": {
+                        "Id": {
+                            "N": "101"
+                        }
+                    },
+                    "NewImage": {
+                        "SomethingElse": {
+                            "S": "success"
+                        }
+                    },
+                    "StreamViewType": "NEW_AND_OLD_IMAGES",
+                    "SequenceNumber": "8640712661",
+                    "SizeBytes": 26
+                },
+                "awsRegion": "us-west-2",
+                "eventName": "INSERT",
+                "eventSourceARN": "eventsource_arn",
+                "eventSource": "aws:dynamodb"
+            }
+        ]
+    }
+    ```
+
+### Partial failure mechanics
+
+All records in the batch will be passed to this handler for processing, even if exceptions are thrown - here's the behaviour after completing the batch:
+
+* **All records successfully processed**. We will return an empty list of item failures `{'batchItemFailures': []}`
+* **Partial success with some exceptions**. We will return a list of all item IDs/sequence numbers that failed processing
+* **All records failed to be processed**. We will raise a `BatchProcessingError` exception with a list of all exceptions raised when processing
+
+!!! warning
+    You will not have access to the **processed messages** within the Lambda Handler; use the context manager for that.
+
+    All processing logic will and should be performed by the `record_handler` function.
+
-### Partial failure mechanics
-All records in the batch will be passed to this handler for processing, even if exceptions are thrown - Here's the behaviour after completing the batch:
+## Advanced
-* **Any successfully processed messages**, we will delete them from the queue via `sqs:DeleteMessageBatch`
-* **Any unprocessed messages detected**, we will raise `SQSBatchProcessingError` to ensure failed messages return to your SQS queue
+### Pydantic integration
-!!! warning
-    You will not have accessed to the **processed messages** within the Lambda Handler.
+You can bring your own Pydantic models via the **`model`** parameter when inheriting from **`SqsRecordModel`**, **`KinesisDataStreamRecord`**, or **`DynamoDBStreamRecordModel`**.
-    All processing logic will and should be performed by the `record_handler` function.
+Inheritance is important because we need to access message IDs and sequence numbers from these records in the event of failure. Mypy is fully integrated with this utility, so it should identify whether you're passing an incorrect model.
-## Advanced
-### Choosing between decorator and context manager
+=== "SQS"
+
+    ```python hl_lines="5 9-10 12-19 21 27"
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.parser.models import SqsRecordModel
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+    from pydantic import BaseModel, validator
+
+
+    class Order(BaseModel):
+        item: dict
+
+    class OrderSqsRecord(SqsRecordModel):
+        body: Order
+
+        # auto transform json string
+        # so Pydantic can auto-initialize nested Order model
+        @validator("body", pre=True)
+        def transform_body_to_dict(cls, value: str):
+            return json.loads(value)
+
+    processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqsRecord)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: OrderSqsRecord):
+        return record.body.item
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "Kinesis Data Streams"
+
+    ```python hl_lines="5 9-10 12-20 22-23 26 32"
+    import json
-They have nearly the same behaviour when it comes to processing messages from the batch:
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord as KinesisDataStreamRecordModel
+    from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecordPayload
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+    from pydantic import BaseModel, validator
-* **Entire batch has been successfully processed**, where your Lambda handler returned successfully, we will let SQS delete the batch to optimize your cost
-* **Entire Batch has been partially processed successfully**, where exceptions were raised within your `record handler`, we will:
-    * **1)** Delete successfully processed messages from the queue by directly calling `sqs:DeleteMessageBatch`
-    * **2)** Raise `SQSBatchProcessingError` to ensure failed messages return to your SQS queue
-The only difference is that **PartialSQSProcessor** will give you access to processed messages if you need.
+
+    class Order(BaseModel):
+        item: dict
+
+    class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload):
+        data: Order
+
+        # auto transform json string
+        # so Pydantic can auto-initialize nested Order model
+        @validator("data", pre=True)
+        def transform_message_to_dict(cls, value: str):
+            # Powertools KinesisDataStreamRecordModel already decodes b64 to str here
+            return json.loads(value)
+
+    class OrderKinesisRecord(KinesisDataStreamRecordModel):
+        kinesis: OrderKinesisPayloadRecord
+
+
+    processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: OrderKinesisRecord):
+        return record.kinesis.data.item
+
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "DynamoDB Streams"
+
+    ```python hl_lines="7 11-12 14-21 23-25 27-28 31 37"
+    import json
+
+    from typing import Dict, Literal, Optional
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamChangedRecordModel, DynamoDBStreamRecordModel
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+    from pydantic import BaseModel, validator
+
+
+    class Order(BaseModel):
+        item: dict
+
+    class OrderDynamoDB(BaseModel):
+        Message: Order
+
+        # auto transform json string
+        # so Pydantic can auto-initialize nested Order model
+        @validator("Message", pre=True)
+        def transform_message_to_dict(cls, value: Dict[Literal["S"], str]):
+            return json.loads(value["S"])
+
+    class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel):
+        NewImage: Optional[OrderDynamoDB]
+        OldImage: Optional[OrderDynamoDB]
+
+    class OrderDynamoDBRecord(DynamoDBStreamRecordModel):
+        dynamodb: OrderDynamoDBChangeRecord
+
+
+    processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: OrderDynamoDBRecord):
+        return record.dynamodb.NewImage.Message.item
+
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+### Accessing processed messages
+
+Use the context manager to access a list of all returned values from your `record_handler` function.
+
+> Signature: `List[Union[SuccessResponse, FailureResponse]]`
+
+* **When successful**. We will include a tuple with `success`, the result of `record_handler`, and the batch record
+* **When failed**.
We will include a tuple with `fail`, the exception as a string, and the batch record
+
+
+=== "app.py"
+
+    ```python hl_lines="31-38"
+    import json
+
+    from typing import Any, List, Literal, Union
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import (BatchProcessor,
+                                                       EventType,
+                                                       FailureResponse,
+                                                       SuccessResponse,
+                                                       batch_processor)
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context: LambdaContext):
+        batch = event["Records"]
+        with processor(records=batch, handler=record_handler):
+            processed_messages: List[Union[SuccessResponse, FailureResponse]] = processor.process()
+
+        for message in processed_messages:
+            status: Union[Literal["success"], Literal["fail"]] = message[0]
+            result: Any = message[1]
+            record: SQSRecord = message[2]
+
+        return processor.response()
+    ```
+
+
+### Extending BatchProcessor
+
+You might want to bring custom logic to the existing `BatchProcessor` to slightly override how we handle successes and failures.
+
+For these scenarios, you can subclass `BatchProcessor` and quickly override the `success_handler` and `failure_handler` methods:
+
+* **`success_handler()`** – Keeps track of successful batch records
+* **`failure_handler()`** – Keeps track of failed batch records
+
+**Example**
+
+Let's suppose you'd like to add a metric named `BatchRecordFailures` for each batch record that failed processing:
+
+=== "app.py"
+
+    ```python
+    import json
+
+    from aws_lambda_powertools import Metrics, Tracer
+    from aws_lambda_powertools.metrics import MetricUnit
+    from aws_lambda_powertools.utilities.batch import batch_processor, BatchProcessor, ExceptionInfo, EventType, FailureResponse
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    class MyProcessor(BatchProcessor):
+        def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse:
+            metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1)
+            return super().failure_handler(record, exception)
+
+    processor = MyProcessor(event_type=EventType.SQS)
+    metrics = Metrics(namespace="test")
+    tracer = Tracer()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @metrics.log_metrics(capture_cold_start_metric=True)
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+### Create your own partial processor
+
+You can create your own partial batch processor from scratch by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()` and `_process_record()`.
+
+* **`_process_record()`** – handles all processing logic for each individual message of a batch, including calling the `record_handler` (`self.handler`)
+* **`_prepare()`** – called once as part of the processor initialization
+* **`_clean()`** – teardown logic called once after `_process_record` completes
+
+You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function.
+
+=== "custom_processor.py"
+
+    ```python hl_lines="3 9 24 30 37 57"
+    import os
+    import sys
+    from random import randint
+
+    import boto3
+
+    from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor
+
+    table_name = os.getenv("TABLE_NAME", "table_not_found")
+
+    class MyPartialProcessor(BasePartialProcessor):
+        """
+        Process a record and store successful results in an Amazon DynamoDB table
+
+        Parameters
+        ----------
+        table_name: str
+            DynamoDB table name to write results to
+        """
+
+        def __init__(self, table_name: str):
+            self.table_name = table_name
+
+            super().__init__()
+
+        def _prepare(self):
+            # It's called once, *before* processing
+            # Creates table resource and cleans previous results
+            self.ddb_table = boto3.resource("dynamodb").Table(self.table_name)
+            self.success_messages.clear()
+
+        def _clean(self):
+            # It's called once, *after* processing all records (when closing the context manager)
+            # Here we're sending, at once, all successful messages to a ddb table
+            with self.ddb_table.batch_writer() as batch:
+                for result in self.success_messages:
+                    batch.put_item(Item=result)
+
+        def _process_record(self, record):
+            # It handles how your record is processed
+            # Here we're keeping the status of each run
+            # where self.handler is the record_handler function passed as an argument
+            try:
+                result = self.handler(record)  # record_handler passed to decorator/context manager
+                return self.success_handler(record, result)
+            except Exception:
+                return self.failure_handler(record, sys.exc_info())
+
+        def success_handler(self, record, result):
+            entry = ("success", result, record)
+            message = {"age": result}
+            self.success_messages.append(message)
+            return entry
+
+
+    def record_handler(record):
+        return randint(0, 100)
+
+    @batch_processor(record_handler=record_handler, processor=MyPartialProcessor(table_name))
+    def lambda_handler(event, context):
+        return {"statusCode": 200}
+    ```
+
+### Caveats
+
+#### Tracer response auto-capture for large batch sizes
+
+When using Tracer to capture responses for each batch record processing, you might exceed 64K of tracing data depending on what you return from your `record_handler` function, or how big your batch size is.
+
+If that's the case, you can configure [Tracer to disable response auto-capturing](../core/tracer.md#disabling-response-auto-capture){target="_blank"}.
+
+
+```python hl_lines="14" title="Disabling Tracer response auto-capturing"
+import json
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+processor = BatchProcessor(event_type=EventType.SQS)
+tracer = Tracer()
+logger = Logger()
+
+
+@tracer.capture_method(capture_response=False)
+def record_handler(record: SQSRecord):
+    payload: str = record.body
+    if payload:
+        item: dict = json.loads(payload)
+    ...
+
+@logger.inject_lambda_context
+@tracer.capture_lambda_handler
+@batch_processor(record_handler=record_handler, processor=processor)
+def lambda_handler(event, context: LambdaContext):
+    return processor.response()
+
+```
+
+## Testing your code
+
+As there are no external calls, you can unit test your code with `BatchProcessor` quite easily.
+
+**Example**: Given a SQS batch where the first batch record succeeds and the second fails processing, we should have a single item reported in the function response.
+
+=== "test_app.py"
+
+    ```python
+    import json
+
+    from pathlib import Path
+    from dataclasses import dataclass
+
+    import pytest
+    from src.app import lambda_handler, processor
+
+
+    def load_event(path: Path):
+        with path.open() as f:
+            return json.load(f)
+
+
+    @pytest.fixture
+    def lambda_context():
+        @dataclass
+        class LambdaContext:
+            function_name: str = "test"
+            memory_limit_in_mb: int = 128
+            invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test"
+            aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72"
+
+        return LambdaContext()
+
+    @pytest.fixture()
+    def sqs_event():
+        """Generates SQS event"""
+        return load_event(path=Path("events/sqs_event.json"))
+
+
+    def test_app_batch_partial_response(sqs_event, lambda_context):
+        # GIVEN a batch where the second record will fail processing
+        # processor is imported from src.app for additional assertions
+        successful_record = sqs_event["Records"][0]
+        failed_record = sqs_event["Records"][1]
+        expected_response = {
+            "batchItemFailures": [
+                {
+                    "itemIdentifier": failed_record["messageId"]
+                }
+            ]
+        }
+
+        # WHEN
+        ret = lambda_handler(sqs_event, lambda_context)
+
+        # THEN
+        assert ret == expected_response
+        assert len(processor.fail_messages) == 1
+        assert processor.success_messages[0] == successful_record
+    ```
+
+=== "src/app.py"
+
+    ```python
+    import json
+
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+    from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
+    from aws_lambda_powertools.utilities.typing import LambdaContext
+
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+    tracer = Tracer()
+    logger = Logger()
+
+
+    @tracer.capture_method
+    def record_handler(record: SQSRecord):
+        payload: str = record.body
+        if payload:
+            item: dict = json.loads(payload)
+        ...
+
+    @logger.inject_lambda_context
+    @tracer.capture_lambda_handler
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context: LambdaContext):
+        return processor.response()
+    ```
+
+=== "Sample SQS event"
+
+    ```json title="events/sqs_event.json"
+    {
+        "Records": [
+            {
+                "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
+                "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a",
+                "body": "{\"Message\": \"success\"}",
+                "attributes": {
+                    "ApproximateReceiveCount": "1",
+                    "SentTimestamp": "1545082649183",
+                    "SenderId": "AIDAIENQZJOLO23YVJ4VO",
+                    "ApproximateFirstReceiveTimestamp": "1545082649185"
+                },
+                "messageAttributes": {},
+                "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
+                "eventSource": "aws:sqs",
+                "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
+                "awsRegion": "us-east-1"
+            },
+            {
+                "messageId": "244fc6b4-87a3-44ab-83d2-361172410c3a",
+                "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a",
+                "body": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0Lg==",
+                "attributes": {
+                    "ApproximateReceiveCount": "1",
+                    "SentTimestamp": "1545082649183",
+                    "SenderId": "AIDAIENQZJOLO23YVJ4VO",
+                    "ApproximateFirstReceiveTimestamp": "1545082649185"
+                },
+                "messageAttributes": {},
+                "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
+                "eventSource": "aws:sqs",
+                "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
+                "awsRegion": "us-east-1"
+            }
+        ]
+    }
+    ```
+
+## FAQ
+
+### Choosing between decorator and context manager
+
+Use the context manager when you want access to the processed messages, or when you need to handle the `BatchProcessingError` exception raised when all records within the batch fail to be processed, as shown in the sketch below. Otherwise, the `batch_processor` decorator is the more concise option.
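+
+A minimal sketch of the context manager option, assuming a `record_handler` like the ones shown earlier:
+
+```python title="context_manager_option.py"
+from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType
+from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError
+
+processor = BatchProcessor(event_type=EventType.SQS)
+
+def lambda_handler(event, context):
+    try:
+        with processor(event["Records"], record_handler):  # record_handler assumed from earlier examples
+            processor.process()
+    except BatchProcessingError:
+        # raised on exit only when ALL records in the batch failed processing
+        raise
+
+    # processor.success_messages and processor.fail_messages are available here
+    return processor.response()
+```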
+
+### Integrating exception handling with Sentry.io
+
+When using Sentry.io for error monitoring, you can override `failure_handler` to capture each processing exception with the Sentry SDK:
+
+> Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732)
+
+=== "sentry_integration.py"
+
+    ```python hl_lines="2 5-6"
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, FailureResponse
+    from sentry_sdk import capture_exception
+
+
+    class MyProcessor(BatchProcessor):
+        def failure_handler(self, record, exception) -> FailureResponse:
+            capture_exception()  # send exception to Sentry
+            return super().failure_handler(record, exception)
+    ```
+
+
+## Legacy
+
+!!! tip "This is kept for historical purposes. Use the new [BatchProcessor](#processing-messages-from-sqs) instead."
+
+### Migration guide
+
+!!! info "Keep reading if you are using `sqs_batch_processor` or `PartialSQSProcessor`"
+
+[As of Nov 2021](https://aws.amazon.com/about-aws/whats-new/2021/11/aws-lambda-partial-batch-response-sqs-event-source/){target="_blank"}, this is no longer needed as SQS, Kinesis, and DynamoDB Streams offer this capability natively, with one caveat - it's an [opt-in feature](#required-resources).
+
+As this is a native feature, we no longer need to instantiate boto3 or apply customizations like exception suppressing – this also lowers the cost of your Lambda function, as you can delegate deleting partial failures to Lambda.
+
+!!! tip "It's also easier to test since it's mostly a [contract based response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"}."
+
+You can migrate in three steps:
+
+1. If you are using the **`sqs_batch_processor`** decorator, you can now use the **`batch_processor`** decorator
+2. If you were using **`PartialSQSProcessor`**, you can now use **`BatchProcessor`**
+3. Change your Lambda handler to return the new response format
+
+
+=== "Decorator: Before"
+
+    ```python hl_lines="1 6"
+    from aws_lambda_powertools.utilities.batch import sqs_batch_processor
+
+    def record_handler(record):
+        return do_something_with(record["body"])
+
+    @sqs_batch_processor(record_handler=record_handler)
+    def lambda_handler(event, context):
+        return {"statusCode": 200}
+    ```
+
+=== "Decorator: After"
+
+    ```python hl_lines="3 5 11"
+    import json
+
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+
+    processor = BatchProcessor(event_type=EventType.SQS)
+
+    def record_handler(record):
+        return do_something_with(record["body"])
+
+    @batch_processor(record_handler=record_handler, processor=processor)
+    def lambda_handler(event, context):
+        return processor.response()
+    ```
+
+
+=== "Context manager: Before"
+
+    ```python hl_lines="1-2 4 14 19"
+    from aws_lambda_powertools.utilities.batch import PartialSQSProcessor
+    from botocore.config import Config
+
+    config = Config(region_name="us-east-1")
+
+    def record_handler(record):
+        return_value = do_something_with(record["body"])
+        return return_value
+
+    def lambda_handler(event, context):
         records = event["Records"]
 
-        processor = PartialSQSProcessor()
+        processor = PartialSQSProcessor(config=config)
 
-        with processor(records, record_handler) as proc:
-            result = proc.process()  # Returns a list of all results from record_handler
+        with processor(records, record_handler):
+            result = processor.process()
 
         return result
     ```
 
+=== "Context manager: After"
+
+    ```python hl_lines="1 11"
+    from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
+
+
+    def record_handler(record):
+        return_value = do_something_with(record["body"])
+        return return_value
+
+    def lambda_handler(event, context):
+        records = event["Records"]
+
+        processor = BatchProcessor(event_type=EventType.SQS)
+
+        with processor(records, record_handler):
+            result = processor.process()
+
+        return processor.response()
+    ```
+
 ### Customizing boto configuration
 
 The **`config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html)
@@ -267,98 +1450,3 @@ If you want to disable the default behavior where `SQSBatchProcessingError` is r
     with processor(records, record_handler):
         result = processor.process()
     ```
-
-### Create your own partial processor
-
-You can create your own partial batch processor by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()` and `_process_record()`.
-
-* **`_process_record()`** - Handles all processing logic for each individual message of a batch, including calling the `record_handler` (self.handler)
-* **`_prepare()`** - Called once as part of the processor initialization
-* **`clean()`** - Teardown logic called once after `_process_record` completes
-
-You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function.
- -=== "custom_processor.py" - - ```python hl_lines="3 9 24 30 37 57" - from random import randint - - from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor - import boto3 - import os - - table_name = os.getenv("TABLE_NAME", "table_not_found") - - class MyPartialProcessor(BasePartialProcessor): - """ - Process a record and stores successful results at a Amazon DynamoDB Table - - Parameters - ---------- - table_name: str - DynamoDB table name to write results to - """ - - def __init__(self, table_name: str): - self.table_name = table_name - - super().__init__() - - def _prepare(self): - # It's called once, *before* processing - # Creates table resource and clean previous results - self.ddb_table = boto3.resource("dynamodb").Table(self.table_name) - self.success_messages.clear() - - def _clean(self): - # It's called once, *after* closing processing all records (closing the context manager) - # Here we're sending, at once, all successful messages to a ddb table - with ddb_table.batch_writer() as batch: - for result in self.success_messages: - batch.put_item(Item=result) - - def _process_record(self, record): - # It handles how your record is processed - # Here we're keeping the status of each run - # where self.handler is the record_handler function passed as an argument - try: - result = self.handler(record) # record_handler passed to decorator/context manager - return self.success_handler(record, result) - except Exception as exc: - return self.failure_handler(record, exc) - - def success_handler(self, record): - entry = ("success", result, record) - message = {"age": result} - self.success_messages.append(message) - return entry - - - def record_handler(record): - return randint(0, 100) - - @batch_processor(record_handler=record_handler, processor=MyPartialProcessor(table_name)) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -### Integrating exception handling with Sentry.io - -When using Sentry.io for error monitoring, you can override `failure_handler` to include to capture each processing exception: - -> Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732) - -=== "sentry_integration.py" - - ```python hl_lines="4 7-8" - from typing import Tuple - - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from sentry_sdk import capture_exception - - class SQSProcessor(PartialSQSProcessor): - def failure_handler(self, record: Event, exception: Tuple) -> Tuple: # type: ignore - capture_exception() # send exception to Sentry - logger.exception("got exception while processing SQS message") - return super().failure_handler(record, exception) # type: ignore - ``` diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index a453f0bfe07..cd6fc67ea15 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -1,12 +1,29 @@ -from typing import Callable +import json +from random import randint +from typing import Callable, Dict, Optional from unittest.mock import patch import pytest from botocore.config import Config from botocore.stub import Stubber -from aws_lambda_powertools.utilities.batch import PartialSQSProcessor, batch_processor, sqs_batch_processor -from aws_lambda_powertools.utilities.batch.exceptions import SQSBatchProcessingError +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + PartialSQSProcessor, + batch_processor, + 
sqs_batch_processor, +) +from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError, SQSBatchProcessingError +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.parser import BaseModel, validator +from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamChangedRecordModel, DynamoDBStreamRecordModel +from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord as KinesisDataStreamRecordModel +from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecordPayload, SqsRecordModel +from aws_lambda_powertools.utilities.parser.types import Literal +from tests.functional.utils import b64_to_str, str_to_b64 @pytest.fixture(scope="module") @@ -16,7 +33,12 @@ def factory(body: str): "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", "body": body, - "attributes": {}, + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1545082649183", + "SenderId": "AIDAIENQZJOLO23YVJ4VO", + "ApproximateFirstReceiveTimestamp": "1545082649185", + }, "messageAttributes": {}, "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", "eventSource": "aws:sqs", @@ -27,6 +49,53 @@ def factory(body: str): return factory +@pytest.fixture(scope="module") +def kinesis_event_factory() -> Callable: + def factory(body: str): + seq = "".join(str(randint(0, 9)) for _ in range(52)) + return { + "kinesis": { + "kinesisSchemaVersion": "1.0", + "partitionKey": "1", + "sequenceNumber": seq, + "data": str_to_b64(body), + "approximateArrivalTimestamp": 1545084650.987, + }, + "eventSource": "aws:kinesis", + "eventVersion": "1.0", + "eventID": f"shardId-000000000006:{seq}", + "eventName": "aws:kinesis:record", + "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-role", + "awsRegion": "us-east-2", + "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream", + } + + return factory + + +@pytest.fixture(scope="module") +def dynamodb_event_factory() -> Callable: + def factory(body: str): + seq = "".join(str(randint(0, 9)) for _ in range(10)) + return { + "eventID": "1", + "eventVersion": "1.0", + "dynamodb": { + "Keys": {"Id": {"N": "101"}}, + "NewImage": {"Message": {"S": body}}, + "StreamViewType": "NEW_AND_OLD_IMAGES", + "SequenceNumber": seq, + "SizeBytes": 26, + }, + "awsRegion": "us-west-2", + "eventName": "INSERT", + "eventSourceARN": "eventsource_arn", + "eventSource": "aws:dynamodb", + } + + return factory + + @pytest.fixture(scope="module") def record_handler() -> Callable: def handler(record): @@ -38,6 +107,28 @@ def handler(record): return handler +@pytest.fixture(scope="module") +def kinesis_record_handler() -> Callable: + def handler(record: KinesisStreamRecord): + body = b64_to_str(record.kinesis.data) + if "fail" in body: + raise Exception("Failed to process record.") + return body + + return handler + + +@pytest.fixture(scope="module") +def dynamodb_record_handler() -> Callable: + def handler(record: DynamoDBRecord): + body = record.dynamodb.new_image.get("Message").get_value + if "fail" in body: + raise Exception("Failed to process record.") + return body + + return handler + + @pytest.fixture(scope="module") def config() -> Config: return Config(region_name="us-east-1") @@ -67,6 +158,14 @@ def 
stubbed_partial_processor_suppressed(config) -> PartialSQSProcessor: yield stubber, processor +@pytest.fixture(scope="module") +def order_event_factory() -> Callable: + def factory(item: Dict) -> str: + return json.dumps({"item": item}) + + return factory + + def test_partial_sqs_processor_context_with_failure(sqs_event_factory, record_handler, partial_processor): """ Test processor with one failing record @@ -290,3 +389,448 @@ def test_partial_sqs_processor_context_only_failure(sqs_event_factory, record_ha ctx.process() assert len(error.value.child_exceptions) == 2 + + +def test_batch_processor_middleware_success_only(sqs_event_factory, record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("success")) + second_record = SQSRecord(sqs_event_factory("success")) + event = {"Records": [first_record.raw_event, second_record.raw_event]} + + processor = BatchProcessor(event_type=EventType.SQS) + + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert result["batchItemFailures"] == [] + + +def test_batch_processor_middleware_with_failure(sqs_event_factory, record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("fail")) + second_record = SQSRecord(sqs_event_factory("success")) + event = {"Records": [first_record.raw_event, second_record.raw_event]} + + processor = BatchProcessor(event_type=EventType.SQS) + + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert len(result["batchItemFailures"]) == 1 + + +def test_batch_processor_context_success_only(sqs_event_factory, record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("success")) + second_record = SQSRecord(sqs_event_factory("success")) + records = [first_record.raw_event, second_record.raw_event] + processor = BatchProcessor(event_type=EventType.SQS) + + # WHEN + with processor(records, record_handler) as batch: + processed_messages = batch.process() + + # THEN + assert processed_messages == [ + ("success", first_record.body, first_record.raw_event), + ("success", second_record.body, second_record.raw_event), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_context_with_failure(sqs_event_factory, record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("failure")) + second_record = SQSRecord(sqs_event_factory("success")) + records = [first_record.raw_event, second_record.raw_event] + processor = BatchProcessor(event_type=EventType.SQS) + + # WHEN + with processor(records, record_handler) as batch: + processed_messages = batch.process() + + # THEN + assert processed_messages[1] == ("success", second_record.body, second_record.raw_event) + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record.message_id}]} + + +def test_batch_processor_kinesis_context_success_only(kinesis_event_factory, kinesis_record_handler): + # GIVEN + first_record = KinesisStreamRecord(kinesis_event_factory("success")) + second_record = KinesisStreamRecord(kinesis_event_factory("success")) + + records = [first_record.raw_event, second_record.raw_event] + processor = BatchProcessor(event_type=EventType.KinesisDataStreams) + + # WHEN + with processor(records, kinesis_record_handler) as batch: + 
processed_messages = batch.process() + + # THEN + assert processed_messages == [ + ("success", b64_to_str(first_record.kinesis.data), first_record.raw_event), + ("success", b64_to_str(second_record.kinesis.data), second_record.raw_event), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_kinesis_context_with_failure(kinesis_event_factory, kinesis_record_handler): + # GIVEN + first_record = KinesisStreamRecord(kinesis_event_factory("failure")) + second_record = KinesisStreamRecord(kinesis_event_factory("success")) + + records = [first_record.raw_event, second_record.raw_event] + processor = BatchProcessor(event_type=EventType.KinesisDataStreams) + + # WHEN + with processor(records, kinesis_record_handler) as batch: + processed_messages = batch.process() + + # THEN + assert processed_messages[1] == ("success", b64_to_str(second_record.kinesis.data), second_record.raw_event) + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record.kinesis.sequence_number}]} + + +def test_batch_processor_kinesis_middleware_with_failure(kinesis_event_factory, kinesis_record_handler): + # GIVEN + first_record = KinesisStreamRecord(kinesis_event_factory("failure")) + second_record = KinesisStreamRecord(kinesis_event_factory("success")) + event = {"Records": [first_record.raw_event, second_record.raw_event]} + + processor = BatchProcessor(event_type=EventType.KinesisDataStreams) + + @batch_processor(record_handler=kinesis_record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert len(result["batchItemFailures"]) == 1 + + +def test_batch_processor_dynamodb_context_success_only(dynamodb_event_factory, dynamodb_record_handler): + # GIVEN + first_record = dynamodb_event_factory("success") + second_record = dynamodb_event_factory("success") + records = [first_record, second_record] + processor = BatchProcessor(event_type=EventType.DynamoDBStreams) + + # WHEN + with processor(records, dynamodb_record_handler) as batch: + processed_messages = batch.process() + + # THEN + assert processed_messages == [ + ("success", first_record["dynamodb"]["NewImage"]["Message"]["S"], first_record), + ("success", second_record["dynamodb"]["NewImage"]["Message"]["S"], second_record), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_dynamodb_context_with_failure(dynamodb_event_factory, dynamodb_record_handler): + # GIVEN + first_record = dynamodb_event_factory("failure") + second_record = dynamodb_event_factory("success") + records = [first_record, second_record] + processor = BatchProcessor(event_type=EventType.DynamoDBStreams) + + # WHEN + with processor(records, dynamodb_record_handler) as batch: + processed_messages = batch.process() + + # THEN + assert processed_messages[1] == ("success", second_record["dynamodb"]["NewImage"]["Message"]["S"], second_record) + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record["dynamodb"]["SequenceNumber"]}]} + + +def test_batch_processor_dynamodb_middleware_with_failure(dynamodb_event_factory, dynamodb_record_handler): + # GIVEN + first_record = dynamodb_event_factory("failure") + second_record = dynamodb_event_factory("success") + event = {"Records": [first_record, second_record]} + + processor = BatchProcessor(event_type=EventType.DynamoDBStreams) + + 
@batch_processor(record_handler=dynamodb_record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN + result = lambda_handler(event, {}) + + # THEN + assert len(result["batchItemFailures"]) == 1 + + +def test_batch_processor_context_model(sqs_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderSqs(SqsRecordModel): + body: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("body", pre=True) + def transform_body_to_dict(cls, value: str): + return json.loads(value) + + def record_handler(record: OrderSqs): + return record.body.item + + order_event = order_event_factory({"type": "success"}) + first_record = sqs_event_factory(order_event) + second_record = sqs_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqs) + with processor(records, record_handler) as batch: + processed_messages = batch.process() + + # THEN + order_item = json.loads(order_event)["item"] + assert processed_messages == [ + ("success", order_item, first_record), + ("success", order_item, second_record), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_context_model_with_failure(sqs_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderSqs(SqsRecordModel): + body: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("body", pre=True) + def transform_body_to_dict(cls, value: str): + return json.loads(value) + + def record_handler(record: OrderSqs): + if "fail" in record.body.item["type"]: + raise Exception("Failed to process record.") + return record.body.item + + order_event = order_event_factory({"type": "success"}) + order_event_fail = order_event_factory({"type": "fail"}) + first_record = sqs_event_factory(order_event_fail) + second_record = sqs_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqs) + with processor(records, record_handler) as batch: + batch.process() + + # THEN + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record["messageId"]}]} + + +def test_batch_processor_dynamodb_context_model(dynamodb_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderDynamoDB(BaseModel): + Message: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("Message", pre=True) + def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): + return json.loads(value["S"]) + + class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): + NewImage: Optional[OrderDynamoDB] + OldImage: Optional[OrderDynamoDB] + + class OrderDynamoDBRecord(DynamoDBStreamRecordModel): + dynamodb: OrderDynamoDBChangeRecord + + def record_handler(record: OrderDynamoDBRecord): + return record.dynamodb.NewImage.Message.item + + order_event = order_event_factory({"type": "success"}) + first_record = dynamodb_event_factory(order_event) + second_record = dynamodb_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord) + with processor(records, record_handler) as batch: + 
processed_messages = batch.process() + + # THEN + order_item = json.loads(order_event)["item"] + assert processed_messages == [ + ("success", order_item, first_record), + ("success", order_item, second_record), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_dynamodb_context_model_with_failure(dynamodb_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderDynamoDB(BaseModel): + Message: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("Message", pre=True) + def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): + return json.loads(value["S"]) + + class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): + NewImage: Optional[OrderDynamoDB] + OldImage: Optional[OrderDynamoDB] + + class OrderDynamoDBRecord(DynamoDBStreamRecordModel): + dynamodb: OrderDynamoDBChangeRecord + + def record_handler(record: OrderDynamoDBRecord): + if "fail" in record.dynamodb.NewImage.Message.item["type"]: + raise Exception("Failed to process record.") + return record.dynamodb.NewImage.Message.item + + order_event = order_event_factory({"type": "success"}) + order_event_fail = order_event_factory({"type": "fail"}) + first_record = dynamodb_event_factory(order_event_fail) + second_record = dynamodb_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord) + with processor(records, record_handler) as batch: + batch.process() + + # THEN + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record["dynamodb"]["SequenceNumber"]}]} + + +def test_batch_processor_kinesis_context_parser_model(kinesis_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): + data: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("data", pre=True) + def transform_message_to_dict(cls, value: str): + # Powertools KinesisDataStreamRecordModel already decodes b64 to str here + return json.loads(value) + + class OrderKinesisRecord(KinesisDataStreamRecordModel): + kinesis: OrderKinesisPayloadRecord + + def record_handler(record: OrderKinesisRecord): + return record.kinesis.data.item + + order_event = order_event_factory({"type": "success"}) + first_record = kinesis_event_factory(order_event) + second_record = kinesis_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) + with processor(records, record_handler) as batch: + processed_messages = batch.process() + + # THEN + order_item = json.loads(order_event)["item"] + assert processed_messages == [ + ("success", order_item, first_record), + ("success", order_item, second_record), + ] + + assert batch.response() == {"batchItemFailures": []} + + +def test_batch_processor_kinesis_context_parser_model_with_failure(kinesis_event_factory, order_event_factory): + # GIVEN + class Order(BaseModel): + item: dict + + class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): + data: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("data", pre=True) + def transform_message_to_dict(cls, value: str): + # Powertools 
KinesisDataStreamRecordModel + return json.loads(value) + + class OrderKinesisRecord(KinesisDataStreamRecordModel): + kinesis: OrderKinesisPayloadRecord + + def record_handler(record: OrderKinesisRecord): + if "fail" in record.kinesis.data.item["type"]: + raise Exception("Failed to process record.") + return record.kinesis.data.item + + order_event = order_event_factory({"type": "success"}) + order_event_fail = order_event_factory({"type": "fail"}) + + first_record = kinesis_event_factory(order_event_fail) + second_record = kinesis_event_factory(order_event) + records = [first_record, second_record] + + # WHEN + processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) + with processor(records, record_handler) as batch: + batch.process() + + # THEN + assert len(batch.fail_messages) == 1 + assert batch.response() == {"batchItemFailures": [{"itemIdentifier": first_record["kinesis"]["sequenceNumber"]}]} + + +def test_batch_processor_error_when_entire_batch_fails(sqs_event_factory, record_handler): + # GIVEN + first_record = SQSRecord(sqs_event_factory("fail")) + second_record = SQSRecord(sqs_event_factory("fail")) + event = {"Records": [first_record.raw_event, second_record.raw_event]} + + processor = BatchProcessor(event_type=EventType.SQS) + + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + + # WHEN/THEN + with pytest.raises(BatchProcessingError): + lambda_handler(event, {}) diff --git a/tests/functional/utils.py b/tests/functional/utils.py index a58d27f3526..703f21744e2 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -1,3 +1,4 @@ +import base64 import json from pathlib import Path from typing import Any @@ -6,3 +7,11 @@ def load_event(file_name: str) -> Any: path = Path(str(Path(__file__).parent.parent) + "/events/" + file_name) return json.loads(path.read_text()) + + +def str_to_b64(data: str) -> str: + return base64.b64encode(data.encode()).decode("utf-8") + + +def b64_to_str(data: str) -> str: + return base64.b64decode(data.encode()).decode("utf-8") diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000000..e69de29bb2d From 849e003cb9ca5ae10478002ef3d110e095c6432b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 19 Dec 2021 19:23:14 +0000 Subject: [PATCH 31/36] chore(deps-dev): bump mypy from 0.910 to 0.920 (#903) Bumps [mypy](https://github.com/python/mypy) from 0.910 to 0.920.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=0.910&new-version=0.920)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
**Dependabot commands and options**
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--- poetry.lock | 55 ++++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 27 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6a6ec29f26c..6ab5cd763bd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -597,21 +597,21 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.910" +version = "0.920" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" -toml = "*" -typed-ast = {version = ">=1.4.0,<1.5.0", markers = "python_version < \"3.8\""} +tomli = ">=1.1.0,<3.0.0" +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} typing-extensions = ">=3.7.4" [package.extras] dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<1.5.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] [[package]] name = "mypy-extensions" @@ -1056,7 +1056,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "d003a9b82e3692f6e55a5dde89dae18796d9c5747c9d097a0ec113ecb4e02f02" +content-hash = "df5193f3c984adfc79fe60b7cf6ef7b3e9b91d086cba5145faca227c7b2034d5" [metadata.files] atomicwrites = [ @@ -1361,29 +1361,26 @@ mkdocs-material-extensions = [ {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, ] mypy = [ - {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, - {file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"}, - {file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"}, - {file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"}, - {file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"}, - {file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"}, - {file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"}, - {file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"}, - {file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"}, - {file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"}, - {file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"}, - {file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"}, - {file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"}, - {file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"}, - {file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"}, - {file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"}, - {file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"}, - {file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"}, - {file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"}, - {file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"}, - {file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"}, - {file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"}, - {file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"}, + {file = "mypy-0.920-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41f3575b20714171c832d8f6c7aaaa0d499c9a2d1b8adaaf837b4c9065c38540"}, + {file = "mypy-0.920-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:431be889ffc8d9681813a45575c42e341c19467cbfa6dd09bf41467631feb530"}, + {file = "mypy-0.920-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8b2059f73878e92eff7ed11a03515d6572f4338a882dd7547b5f7dd242118e6"}, + {file = "mypy-0.920-cp310-cp310-win_amd64.whl", hash = "sha256:9cd316e9705555ca6a50670ba5fb0084d756d1d8cb1697c83820b1456b0bc5f3"}, + {file = "mypy-0.920-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e091fe58b4475b3504dc7c3022ff7f4af2f9e9ddf7182047111759ed0973bbde"}, + {file = "mypy-0.920-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98b4f91a75fed2e4c6339e9047aba95968d3a7c4b91e92ab9dc62c0c583564f4"}, + {file = "mypy-0.920-cp36-cp36m-win_amd64.whl", hash = "sha256:562a0e335222d5bbf5162b554c3afe3745b495d67c7fe6f8b0d1b5bace0c1eeb"}, + {file = "mypy-0.920-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:618e677aabd21f30670bffb39a885a967337f5b112c6fb7c79375e6dced605d6"}, + {file = "mypy-0.920-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40cb062f1b7ff4cd6e897a89d8ddc48c6ad7f326b5277c93a8c559564cc1551c"}, + {file = "mypy-0.920-cp37-cp37m-win_amd64.whl", hash = "sha256:69b5a835b12fdbfeed84ef31152d41343d32ccb2b345256d8682324409164330"}, + {file = "mypy-0.920-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:993c2e52ea9570e6e872296c046c946377b9f5e89eeb7afea2a1524cf6e50b27"}, + {file = "mypy-0.920-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df0fec878ccfcb2d1d2306ba31aa757848f681e7bbed443318d9bbd4b0d0fe9a"}, + {file = "mypy-0.920-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:331a81d2c9bf1be25317260a073b41f4584cd11701a7c14facef0aa5a005e843"}, + {file = "mypy-0.920-cp38-cp38-win_amd64.whl", hash = "sha256:ffb1e57ec49a30e3c0ebcfdc910ae4aceb7afb649310b7355509df6b15bd75f6"}, + {file = "mypy-0.920-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:31895b0b3060baf15bf76e789d94722c026f673b34b774bba9e8772295edccff"}, + {file = "mypy-0.920-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:140174e872d20d4768124a089b9f9fc83abd6a349b7f8cc6276bc344eb598922"}, + {file = "mypy-0.920-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:13b3c110309b53f5a62aa1b360f598124be33a42563b790a2a9efaacac99f1fc"}, + {file = "mypy-0.920-cp39-cp39-win_amd64.whl", hash = "sha256:82e6c15675264e923b60a11d6eb8f90665504352e68edfbb4a79aac7a04caddd"}, + {file = "mypy-0.920-py3-none-any.whl", hash = "sha256:71c77bd885d2ce44900731d4652d0d1c174dc66a0f11200e0c680bdedf1a6b37"}, + {file = "mypy-0.920.tar.gz", hash = "sha256:a55438627f5f546192f13255a994d6d1cf2659df48adcf966132b4379fd9c86b"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, diff --git a/pyproject.toml b/pyproject.toml index feded5bf884..532d78d0051 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ flake8-bugbear = "^21.11.29" mkdocs-material = "^7.3.6" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" -mypy = "^0.910" +mypy = "^0.920" [tool.poetry.extras] From 39b313651767027b357411fd8a2c632e14dd86f8 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Sun, 19 Dec 2021 22:59:12 -0800 Subject: [PATCH 32/36] chore: minor housekeeping before release (#912) Co-authored-by: heitorlessa --- .../utilities/batch/__init__.py | 2 +- aws_lambda_powertools/utilities/batch/base.py | 27 +++++++------------ .../utilities/batch/exceptions.py | 11 +++++--- .../utilities/parser/parser.py | 4 +-- pyproject.toml | 3 +++ .../idempotency/test_idempotency.py | 12 +++++++++ tests/functional/test_utilities_batch.py | 4 ++- 7 files changed, 38 insertions(+), 25 deletions(-) diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py index 584342e5fd0..463f6f7fbff 100644 --- a/aws_lambda_powertools/utilities/batch/__init__.py +++ b/aws_lambda_powertools/utilities/batch/__init__.py @@ -8,11 +8,11 @@ BasePartialProcessor, BatchProcessor, EventType, - ExceptionInfo, FailureResponse, SuccessResponse, batch_processor, ) +from aws_lambda_powertools.utilities.batch.exceptions import ExceptionInfo from aws_lambda_powertools.utilities.batch.sqs import PartialSQSProcessor, sqs_batch_processor __all__ = ( diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index 02eb00ffaed..d8fdc2d85f2 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -8,11 +8,10 @@ import sys from abc import ABC, abstractmethod from enum import Enum -from types import TracebackType from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union, overload from aws_lambda_powertools.middleware_factory import lambda_handler_decorator -from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError +from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError, ExceptionInfo from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord @@ -30,8 +29,6 @@ class EventType(Enum): # type specifics # has_pydantic = "pydantic" in sys.modules -ExceptionInfo = Tuple[Type[BaseException], BaseException, TracebackType] -OptExcInfo = Union[ExceptionInfo, Tuple[None, None, None]] # For IntelliSense and Mypy 
to work, we need to account for possible SQS, Kinesis and DynamoDB subclasses # We need them as subclasses as we must access their message ID or sequence number metadata via dot notation @@ -61,7 +58,7 @@ class BasePartialProcessor(ABC): def __init__(self): self.success_messages: List[BatchEventTypes] = [] self.fail_messages: List[BatchEventTypes] = [] - self.exceptions: List = [] + self.exceptions: List[ExceptionInfo] = [] @abstractmethod def _prepare(self): @@ -132,7 +129,7 @@ def success_handler(self, record, result: Any) -> SuccessResponse: self.success_messages.append(record) return entry - def failure_handler(self, record, exception: OptExcInfo) -> FailureResponse: + def failure_handler(self, record, exception: ExceptionInfo) -> FailureResponse: """ Keeps track of batch records that failed processing @@ -140,7 +137,7 @@ def failure_handler(self, record, exception: OptExcInfo) -> FailureResponse: ---------- record: Any record that failed processing - exception: OptExcInfo + exception: ExceptionInfo Exception information containing type, value, and traceback (sys.exc_info()) Returns @@ -411,32 +408,28 @@ def _get_messages_to_report(self) -> Dict[str, str]: def _collect_sqs_failures(self): if self.model: return {"itemIdentifier": msg.messageId for msg in self.fail_messages} - else: - return {"itemIdentifier": msg.message_id for msg in self.fail_messages} + return {"itemIdentifier": msg.message_id for msg in self.fail_messages} def _collect_kinesis_failures(self): if self.model: # Pydantic model uses int but Lambda poller expects str return {"itemIdentifier": msg.kinesis.sequenceNumber for msg in self.fail_messages} - else: - return {"itemIdentifier": msg.kinesis.sequence_number for msg in self.fail_messages} + return {"itemIdentifier": msg.kinesis.sequence_number for msg in self.fail_messages} def _collect_dynamodb_failures(self): if self.model: return {"itemIdentifier": msg.dynamodb.SequenceNumber for msg in self.fail_messages} - else: - return {"itemIdentifier": msg.dynamodb.sequence_number for msg in self.fail_messages} + return {"itemIdentifier": msg.dynamodb.sequence_number for msg in self.fail_messages} @overload def _to_batch_type(self, record: dict, event_type: EventType, model: "BatchTypeModels") -> "BatchTypeModels": - ... + ... # pragma: no cover @overload def _to_batch_type(self, record: dict, event_type: EventType) -> EventSourceDataClassTypes: - ... + ... 
# pragma: no cover def _to_batch_type(self, record: dict, event_type: EventType, model: Optional["BatchTypeModels"] = None): if model is not None: return model.parse_obj(record) - else: - return self._DATA_CLASS_MAPPING[event_type](record) + return self._DATA_CLASS_MAPPING[event_type](record) diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py index fe51433a5d6..dc4ca300c7c 100644 --- a/aws_lambda_powertools/utilities/batch/exceptions.py +++ b/aws_lambda_powertools/utilities/batch/exceptions.py @@ -2,11 +2,14 @@ Batch processing exceptions """ import traceback -from typing import Optional, Tuple +from types import TracebackType +from typing import List, Optional, Tuple, Type + +ExceptionInfo = Tuple[Type[BaseException], BaseException, TracebackType] class BaseBatchProcessingError(Exception): - def __init__(self, msg="", child_exceptions=()): + def __init__(self, msg="", child_exceptions: Optional[List[ExceptionInfo]] = None): super().__init__(msg) self.msg = msg self.child_exceptions = child_exceptions @@ -24,7 +27,7 @@ def format_exceptions(self, parent_exception_str): class SQSBatchProcessingError(BaseBatchProcessingError): """When at least one message within a batch could not be processed""" - def __init__(self, msg="", child_exceptions: Optional[Tuple[Exception]] = None): + def __init__(self, msg="", child_exceptions: Optional[List[ExceptionInfo]] = None): super().__init__(msg, child_exceptions) # Overriding this method so we can output all child exception tracebacks when we raise this exception to prevent @@ -37,7 +40,7 @@ def __str__(self): class BatchProcessingError(BaseBatchProcessingError): """When all batch records failed to be processed""" - def __init__(self, msg="", child_exceptions: Optional[Tuple[Exception]] = None): + def __init__(self, msg="", child_exceptions: Optional[List[ExceptionInfo]] = None): super().__init__(msg, child_exceptions) def __str__(self): diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py index a12f163f8a6..ef939cd11f7 100644 --- a/aws_lambda_powertools/utilities/parser/parser.py +++ b/aws_lambda_powertools/utilities/parser/parser.py @@ -86,12 +86,12 @@ def handler(event: Order, context: LambdaContext): @overload def parse(event: Dict[str, Any], model: Type[Model]) -> Model: - ... + ... # pragma: no cover @overload def parse(event: Dict[str, Any], model: Type[Model], envelope: Type[Envelope]) -> EnvelopeModel: - ... + ... 
# pragma: no cover def parse(event: Dict[str, Any], model: Type[Model], envelope: Optional[Type[Envelope]] = None): diff --git a/pyproject.toml b/pyproject.toml index 532d78d0051..c3b87d4f093 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,6 +85,9 @@ exclude_lines = [ # Don't complain if non-runnable code isn't run: "if 0:", "if __name__ == .__main__.:", + + # Ignore type function overload + "@overload", ] [tool.isort] diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index a8cf652d8a0..51e142bfa55 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -1057,3 +1057,15 @@ def two(data): assert one(data=mock_event) == "one" assert two(data=mock_event) == "two" assert len(persistence_store.table.method_calls) == 4 + + +def test_invalid_dynamodb_persistence_layer(): + # Scenario constructing a DynamoDBPersistenceLayer with a key_attr matching sort_key_attr should fail + with pytest.raises(ValueError) as ve: + DynamoDBPersistenceLayer( + table_name="Foo", + key_attr="id", + sort_key_attr="id", + ) + # and raise a ValueError + assert str(ve.value) == "key_attr [id] and sort_key_attr [id] cannot be the same!" diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index cd6fc67ea15..3728af3111d 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -832,5 +832,7 @@ def lambda_handler(event, context): return processor.response() # WHEN/THEN - with pytest.raises(BatchProcessingError): + with pytest.raises(BatchProcessingError) as e: lambda_handler(event, {}) + ret = str(e) + assert ret is not None From 5447dc423208c552d27b20dc1632803959334bcf Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 20 Dec 2021 08:24:30 +0100 Subject: [PATCH 33/36] feat(logger): allow handler with custom kwargs signature (#913) --- aws_lambda_powertools/logging/logger.py | 2 +- tests/functional/test_logger.py | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 0b9b52f8824..7ca5a18e6f4 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -328,7 +328,7 @@ def handler(event, context): ) @functools.wraps(lambda_handler) - def decorate(event, context): + def decorate(event, context, **kwargs): lambda_context = build_lambda_context_model(context) cold_start = _is_cold_start() diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index 3c9a8a54189..3fb43474081 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -597,3 +597,16 @@ def handler(event, context): first_log, second_log = capture_multiple_logging_statements_output(stdout) assert "my_key" in first_log assert "my_key" not in second_log + + +def test_inject_lambda_context_allows_handler_with_kwargs(lambda_context, stdout, service_name): + # GIVEN + logger = Logger(service=service_name, stream=stdout) + + # WHEN + @logger.inject_lambda_context(clear_state=True) + def handler(event, context, my_custom_option=None): + pass + + # THEN + handler({}, lambda_context, my_custom_option="blah") From b144f755792c82098fd3710c8186e832caa809c7 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 20 Dec 2021 08:35:19 +0100 Subject: [PATCH 34/36] docs: external reference to cloudformation custom resource helper (#914) --- mkdocs.yml | 1 + 
1 file changed, 1 insertion(+) diff --git a/mkdocs.yml b/mkdocs.yml index 54a0fa50a67..511cefb5e67 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -27,6 +27,7 @@ nav: - utilities/idempotency.md - utilities/feature_flags.md - utilities/jmespath_functions.md + - CloudFormation Custom Resources: https://github.com/aws-cloudformation/custom-resource-helper" target="_blank theme: name: material From e7e11033386284c88980a3c2dab86206a66a925d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 20 Dec 2021 09:43:20 +0100 Subject: [PATCH 35/36] docs(apigateway): add new not_found feature (#915) --- docs/core/event_handler/api_gateway.md | 87 ++++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 88 insertions(+) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 1f5fa4479c0..cb06aa37841 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -478,6 +478,93 @@ Similarly to [Query strings](#query-strings-and-payload), you can access headers return app.resolve(event, context) ``` + +### Handling not found routes + +By default, we return `404` for any unmatched route. + +You can use **`not_found`** decorator to override this behaviour, and return a custom **`Response`**. + +=== "app.py" + + ```python hl_lines="11 13 16" title="Handling not found" + from aws_lambda_powertools import Logger, Tracer + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import content_types + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response + from aws_lambda_powertools.event_handler.exceptions import NotFoundError + + tracer = Tracer() + logger = Logger() + app = ApiGatewayResolver() + + @app.not_found + @tracer.capture_method + def handle_not_found_errors(exc: NotFoundError) -> Response: + # Return 418 upon 404 errors + logger.info(f"Not found route: {app.current_event.path}") + return Response( + status_code=418, + content_type=content_types.TEXT_PLAIN, + body="I'm a teapot!" + ) + + + @app.get("/catch/me/if/you/can") + @tracer.capture_method + def catch_me_if_you_can(): + return {"message": "oh hey"} + + @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + + +### Exception handling + +You can use **`exception_handler`** decorator with any Python exception. This allows you to handle a common exception outside your route, for example validation errors. 
+
+=== "app.py"
+
+    ```python hl_lines="10 15" title="Exception handling"
+    from aws_lambda_powertools import Logger, Tracer
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler import content_types
+    from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver, Response
+
+    tracer = Tracer()
+    logger = Logger()
+    app = ApiGatewayResolver()
+
+    @app.exception_handler(ValueError)
+    def handle_value_error(ex: ValueError):
+        metadata = {"path": app.current_event.path}
+        logger.error(f"Malformed request: {ex}", extra=metadata)
+
+        return Response(
+            status_code=400,
+            content_type=content_types.TEXT_PLAIN,
+            body="Invalid request",
+        )
+
+
+    @app.get("/hello")
+    @tracer.capture_method
+    def hello_name():
+        name = app.current_event.get_query_string_value(name="name")
+        if name is None:
+            raise ValueError("name query string must be present")
+        return {"message": f"hello {name}"}
+
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    @tracer.capture_lambda_handler
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+
 ### Raising HTTP errors
 
 You can easily raise any HTTP Error back to the client using `ServiceError` exception.
diff --git a/mkdocs.yml b/mkdocs.yml
index 511cefb5e67..218deea586b 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -49,6 +49,7 @@ theme:
     - navigation.sections
     - navigation.expand
     - navigation.top
+    - navigation.instant
   icon:
     repo: fontawesome/brands/github
     logo: media/aws-logo-light.svg
From da5e1f0e5f840c6be9adf6a5709368203926b808 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Mon, 20 Dec 2021 10:01:28 +0100
Subject: [PATCH 36/36] chore: bump to 1.23.0

---
 CHANGELOG.md   | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++
 pyproject.toml |  2 +-
 2 files changed, 51 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index adc3a14aad1..c8808f15cc6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,56 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo
 
 ## [Unreleased]
 
+
+## 1.23.0 - 2021-12-20
+
+### Bug Fixes
+
+* **apigateway:** allow list of HTTP methods in route method ([#838](https://github.com/awslabs/aws-lambda-powertools-python/issues/838))
+* **event-sources:** pass authorizer data to APIGatewayEventAuthorizer ([#897](https://github.com/awslabs/aws-lambda-powertools-python/issues/897))
+* **event-sources:** handle claimsOverrideDetails set to null ([#878](https://github.com/awslabs/aws-lambda-powertools-python/issues/878))
+* **idempotency:** include decorated fn name in hash ([#869](https://github.com/awslabs/aws-lambda-powertools-python/issues/869))
+* **metrics:** explicit type to single_metric ctx manager ([#865](https://github.com/awslabs/aws-lambda-powertools-python/issues/865))
+* **parameters:** mypy appconfig transform and return types ([#877](https://github.com/awslabs/aws-lambda-powertools-python/issues/877))
+* **parser:** mypy overload parse when using envelope ([#885](https://github.com/awslabs/aws-lambda-powertools-python/issues/885))
+* **parser:** kinesis sequence number is str, not int ([#907](https://github.com/awslabs/aws-lambda-powertools-python/issues/907))
+* **parser:** mypy support for payload type override as models ([#883](https://github.com/awslabs/aws-lambda-powertools-python/issues/883))
+* **tracer:** add warm start annotation (ColdStart=False) ([#851](https://github.com/awslabs/aws-lambda-powertools-python/issues/851))
+
+### Documentation
+
+* **nav**: reference cloudformation custom resource helper (CRD) ([#914](https://github.com/awslabs/aws-lambda-powertools-python/issues/914)) +* add new public Slack invite +* disable search blur in non-prod env +* update Lambda Layers version +* **apigateway:** add new not_found feature ([#915](https://github.com/awslabs/aws-lambda-powertools-python/issues/915)) +* **apigateway:** fix sample layout provided ([#864](https://github.com/awslabs/aws-lambda-powertools-python/issues/864)) +* **appsync:** fix users.py typo to locations [#830](https://github.com/awslabs/aws-lambda-powertools-python/issues/830) +* **lambda_layer:** fix CDK layer syntax + +### Features + +* **apigateway:** add exception_handler support ([#898](https://github.com/awslabs/aws-lambda-powertools-python/issues/898)) +* **apigateway:** access parent api resolver from router ([#842](https://github.com/awslabs/aws-lambda-powertools-python/issues/842)) +* **batch:** new BatchProcessor for SQS, DynamoDB, Kinesis ([#886](https://github.com/awslabs/aws-lambda-powertools-python/issues/886)) +* **logger:** allow handler with custom kwargs signature ([#913](https://github.com/awslabs/aws-lambda-powertools-python/issues/913)) +* **tracer:** add service annotation when service is set ([#861](https://github.com/awslabs/aws-lambda-powertools-python/issues/861)) + +### Maintenance + +* minor housekeeping before release ([#912](https://github.com/awslabs/aws-lambda-powertools-python/issues/912)) +* correct pr label order +* **ci:** split latest docs workflow +* **deps:** bump fastjsonschema from 2.15.1 to 2.15.2 ([#891](https://github.com/awslabs/aws-lambda-powertools-python/issues/891)) +* **deps:** bump actions/setup-python from 2.2.2 to 2.3.0 ([#831](https://github.com/awslabs/aws-lambda-powertools-python/issues/831)) +* **deps:** support arm64 when developing locally ([#862](https://github.com/awslabs/aws-lambda-powertools-python/issues/862)) +* **deps:** bump actions/setup-python from 2.3.0 to 2.3.1 ([#852](https://github.com/awslabs/aws-lambda-powertools-python/issues/852)) +* **deps:** bump aws-xray-sdk from 2.8.0 to 2.9.0 ([#876](https://github.com/awslabs/aws-lambda-powertools-python/issues/876)) +* **deps-dev:** bump mypy from 0.910 to 0.920 ([#903](https://github.com/awslabs/aws-lambda-powertools-python/issues/903)) +* **deps-dev:** bump flake8 from 3.9.2 to 4.0.1 ([#789](https://github.com/awslabs/aws-lambda-powertools-python/issues/789)) +* **deps-dev:** bump black from 21.10b0 to 21.11b1 ([#839](https://github.com/awslabs/aws-lambda-powertools-python/issues/839)) +* **deps-dev:** bump black from 21.11b1 to 21.12b0 ([#872](https://github.com/awslabs/aws-lambda-powertools-python/issues/872)) + ## 1.22.0 - 2021-11-17 Tenet update! We've updated **Idiomatic** tenet to **Progressive** to reflect the new Router feature in Event Handler, and more importantly the new wave of customers coming from SRE, Data Analysis, and Data Science background. diff --git a/pyproject.toml b/pyproject.toml index c3b87d4f093..b55dc5de33a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.22.0" +version = "1.23.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]