From 96cbdc1910aa4bdcdb47efef587c04a17f75436e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 22 Aug 2021 08:28:22 +0200 Subject: [PATCH 01/40] fix(idempotency): sorting keys before hashing --- .../utilities/idempotency/persistence/base.py | 4 +-- tests/functional/idempotency/conftest.py | 14 +++++--- .../idempotency/test_idempotency.py | 34 ++++++++++++++----- 3 files changed, 37 insertions(+), 15 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 2f5dd512ac6..4901e9f9f75 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -223,7 +223,7 @@ def _generate_hash(self, data: Any) -> str: """ data = getattr(data, "raw_event", data) # could be a data class depending on decorator order - hashed_data = self.hash_function(json.dumps(data, cls=Encoder).encode()) + hashed_data = self.hash_function(json.dumps(data, cls=Encoder, sort_keys=True).encode()) return hashed_data.hexdigest() def _validate_payload(self, data: Dict[str, Any], data_record: DataRecord) -> None: @@ -310,7 +310,7 @@ def save_success(self, data: Dict[str, Any], result: dict) -> None: result: dict The response from function """ - response_data = json.dumps(result, cls=Encoder) + response_data = json.dumps(result, cls=Encoder, sort_keys=True) data_record = DataRecord( idempotency_key=self._get_hashed_idempotency_key(data=data), diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index e613bb85e60..2c528cafc50 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -21,6 +21,10 @@ TABLE_NAME = "TEST_TABLE" +def serialize(data): + return json.dumps(data, sort_keys=True, cls=Encoder) + + @pytest.fixture(scope="module") def config() -> Config: return Config(region_name="us-east-1") @@ -62,12 +66,12 @@ def lambda_response(): @pytest.fixture(scope="module") def serialized_lambda_response(lambda_response): - return json.dumps(lambda_response, cls=Encoder) + return serialize(lambda_response) @pytest.fixture(scope="module") def deserialized_lambda_response(lambda_response): - return json.loads(json.dumps(lambda_response, cls=Encoder)) + return json.loads(serialize(lambda_response)) @pytest.fixture @@ -144,7 +148,7 @@ def expected_params_put_item_with_validation(hashed_idempotency_key, hashed_vali def hashed_idempotency_key(lambda_apigw_event, default_jmespath, lambda_context): compiled_jmespath = jmespath.compile(default_jmespath) data = compiled_jmespath.search(lambda_apigw_event) - return "test-func#" + hashlib.md5(json.dumps(data).encode()).hexdigest() + return "test-func#" + hashlib.md5(serialize(data).encode()).hexdigest() @pytest.fixture @@ -152,12 +156,12 @@ def hashed_idempotency_key_with_envelope(lambda_apigw_event): event = extract_data_from_envelope( data=lambda_apigw_event, envelope=envelopes.API_GATEWAY_HTTP, jmespath_options={} ) - return "test-func#" + hashlib.md5(json.dumps(event).encode()).hexdigest() + return "test-func#" + hashlib.md5(serialize(event).encode()).hexdigest() @pytest.fixture def hashed_validation_key(lambda_apigw_event): - return hashlib.md5(json.dumps(lambda_apigw_event["requestContext"]).encode()).hexdigest() + return hashlib.md5(serialize(lambda_apigw_event["requestContext"]).encode()).hexdigest() @pytest.fixture diff --git a/tests/functional/idempotency/test_idempotency.py 
b/tests/functional/idempotency/test_idempotency.py index 5505a7dc5c9..cb0d43ae6fa 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -21,6 +21,7 @@ from aws_lambda_powertools.utilities.idempotency.idempotency import idempotent, idempotent_function from aws_lambda_powertools.utilities.idempotency.persistence.base import BasePersistenceLayer, DataRecord from aws_lambda_powertools.utilities.validation import envelopes, validator +from tests.functional.idempotency.conftest import serialize from tests.functional.utils import load_event TABLE_NAME = "TEST_TABLE" @@ -741,7 +742,7 @@ def test_default_no_raise_on_missing_idempotency_key( hashed_key = persistence_store._get_hashed_idempotency_key({}) # THEN return the hash of None - expected_value = "test-func#" + md5(json.dumps(None).encode()).hexdigest() + expected_value = "test-func#" + md5(serialize(None).encode()).hexdigest() assert expected_value == hashed_key @@ -785,7 +786,7 @@ def test_jmespath_with_powertools_json( expected_value = [sub_attr_value, key_attr_value] api_gateway_proxy_event = { "requestContext": {"authorizer": {"claims": {"sub": sub_attr_value}}}, - "body": json.dumps({"id": key_attr_value}), + "body": serialize({"id": key_attr_value}), } # WHEN calling _get_hashed_idempotency_key @@ -869,7 +870,7 @@ def _delete_record(self, data_record: DataRecord) -> None: def test_idempotent_lambda_event_source(lambda_context): # Scenario to validate that we can use the event_source decorator before or after the idempotent decorator mock_event = load_event("apiGatewayProxyV2Event.json") - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} # GIVEN an event_source decorator @@ -889,7 +890,7 @@ def lambda_handler(event, _): def test_idempotent_function(): # Scenario to validate we can use idempotent_function with any function mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") @@ -906,7 +907,7 @@ def test_idempotent_function_arbitrary_args_kwargs(): # Scenario to validate we can use idempotent_function with a function # with an arbitrary number of args and kwargs mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") @@ -921,7 +922,7 @@ def record_handler(arg_one, arg_two, record, is_record): def test_idempotent_function_invalid_data_kwarg(): mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} keyword_argument = "payload" @@ -938,7 +939,7 @@ def 
record_handler(record): def test_idempotent_function_arg_instead_of_kwarg(): mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} keyword_argument = "record" @@ -956,7 +957,7 @@ def record_handler(record): def test_idempotent_function_and_lambda_handler(lambda_context): # Scenario to validate we can use both idempotent_function and idempotent decorators mock_event = {"data": "value"} - persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(mock_event).encode()).hexdigest()) + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(serialize(mock_event).encode()).hexdigest()) expected_result = {"message": "Foo"} @idempotent_function(persistence_store=persistence_layer, data_keyword_argument="record") @@ -976,3 +977,20 @@ def lambda_handler(event, _): # THEN we expect the function and lambda handler to execute successfully assert fn_result == expected_result assert handler_result == expected_result + + +def test_idempotent_data_sorting(): + # Scenario to validate same data in different order hashes to the same idempotency key + data_one = {"data": "test message 1", "more_data": "more data 1"} + data_two = {"more_data": "more data 1", "data": "test message 1"} + + # Assertion will happen in MockPersistenceLayer + persistence_layer = MockPersistenceLayer("test-func#" + hashlib.md5(json.dumps(data_one).encode()).hexdigest()) + + # GIVEN + @idempotent_function(data_keyword_argument="payload", persistence_store=persistence_layer) + def dummy(payload): + return {"message": "hello"} + + # WHEN + dummy(payload=data_two) From 52c685cc138d6fa4ebf5bb1ec1f39d63078c0398 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Sep 2021 10:52:09 +0000 Subject: [PATCH 02/40] chore(deps-dev): bump xenon from 0.7.3 to 0.8.0 (#669) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [xenon](https://github.com/rubik/xenon) from 0.7.3 to 0.8.0.
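A note on the idempotency change in PATCH 01/40 above: `json.dumps` serializes keys in insertion order unless `sort_keys=True` is passed, so two logically identical payloads could previously hash to different idempotency keys. A minimal sketch of the effect, standard library only and independent of the Powertools persistence classes:

```python
import hashlib
import json

data_one = {"data": "test message 1", "more_data": "more data 1"}
data_two = {"more_data": "more data 1", "data": "test message 1"}  # same payload, different key order


def idempotency_hash(payload: dict, sort_keys: bool) -> str:
    # Same shape as _generate_hash above: json.dumps(...).encode() -> md5 -> hexdigest
    # (the custom Encoder class is omitted here for brevity)
    return hashlib.md5(json.dumps(payload, sort_keys=sort_keys).encode()).hexdigest()


# Without sorting, key order leaks into the serialized form and the hashes differ
assert idempotency_hash(data_one, sort_keys=False) != idempotency_hash(data_two, sort_keys=False)

# With sort_keys=True the serialization is canonical, so both payloads map to one key
assert idempotency_hash(data_one, sort_keys=True) == idempotency_hash(data_two, sort_keys=True)
```

The new `test_idempotent_data_sorting` test above asserts the same property end to end through `MockPersistenceLayer`.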
Changelog

Sourced from xenon's changelog.

0.8.0 (Sep 03, 2021)

  • Widen requirements to allow Radon v5: #43, thanks to @dsch
--- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index db58802632b..c1d312ace6d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1030,7 +1030,7 @@ python-versions = "*" [[package]] name = "xenon" -version = "0.7.3" +version = "0.8.0" description = "Monitor code metrics for Python on your CI server" category = "dev" optional = false @@ -1038,7 +1038,7 @@ python-versions = "*" [package.dependencies] PyYAML = ">=4.2b1,<6.0" -radon = ">=4,<5" +radon = ">=4,<6" requests = ">=2.0,<3.0" [[package]] @@ -1059,7 +1059,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "405d8f2eb75f911de58f5a57331a48ee6800a0c4065abe6c647fc8c7f0c25b87" +content-hash = "3716d65cd1018286bba842e20603d4793dcbb0119a4c6a9b5542166dfa3261d0" [metadata.files] appdirs = [ @@ -1690,8 +1690,8 @@ wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] xenon = [ - {file = "xenon-0.7.3-py2.py3-none-any.whl", hash = "sha256:a167b4c329fbea7cd84b148007ba92142f46b88ca095488c175dc7a8a8007ee9"}, - {file = "xenon-0.7.3.tar.gz", hash = "sha256:eda949fbf3cfb4851d49d97e961e2b18a6b66fbecaf285dc89230775d2b2a99f"}, + {file = "xenon-0.8.0-py2.py3-none-any.whl", hash = "sha256:4c3d7157d9ae058364e130c831702e4a65a1f729d4b4def912418ed09772c851"}, + {file = "xenon-0.8.0.tar.gz", hash = "sha256:cd5cad0930673d0e52609712c63fe4721a8f4c4342dc338bd7ea5fa0666b8515"}, ] zipp = [ {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, diff --git a/pyproject.toml b/pyproject.toml index a54704d652e..4c9c4738d89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ pdoc3 = "^0.10.0" pytest-asyncio = "^0.15.1" bandit = "^1.7.0" radon = "^4.5.0" -xenon = "^0.7.3" +xenon = "^0.8.0" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.3.2" mkdocs-material = "^7.2.6" From 42abbc895831f9a235a7c3f86036b8771eb1328d Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Thu, 9 Sep 2021 03:52:17 -0700 Subject: [PATCH 03/40] fix(data-classes): use correct asdict funciton (#666) --- docs/utilities/data_classes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 6cd487a2092..1e0b76b018b 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -299,7 +299,7 @@ In this example extract the `requestId` as the `correlation_id` for logging, use if not user: # No user found, return not authorized - return AppSyncAuthorizerResponse().to_dict() + return AppSyncAuthorizerResponse().asdict() return AppSyncAuthorizerResponse( authorize=True, From 2506c948c52f288efa27922e9c26f4eb4c897239 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Thu, 9 Sep 2021 03:54:16 -0700 Subject: [PATCH 04/40] refactor(data-classes): clean up internal logic for APIGatewayAuthorizerResponse (#643) --- .pylintrc | 12 ++ .../api_gateway_authorizer_event.py | 109 +++++++++++++----- docs/utilities/data_classes.md | 29 ++--- .../test_api_gateway_authorizer.py | 35 +++++- 4 files changed, 142 insertions(+), 43 deletions(-) create mode 100644 .pylintrc diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000000..cf0445d7d27 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,12 @@ +[MESSAGES CONTROL] +disable= + too-many-arguments, + too-many-instance-attributes, + too-few-public-methods, + 
anomalous-backslash-in-string, + missing-class-docstring, + missing-module-docstring, + missing-function-docstring, + +[FORMAT] +max-line-length=120 diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py index 29694eacd97..4682711af92 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py @@ -234,10 +234,12 @@ def raw_query_string(self) -> str: @property def cookies(self) -> List[str]: + """Cookies""" return self["cookies"] @property def headers(self) -> Dict[str, str]: + """Http headers""" return self["headers"] @property @@ -314,6 +316,8 @@ def asdict(self) -> dict: class HttpVerb(enum.Enum): + """Enum of http methods / verbs""" + GET = "GET" POST = "POST" PUT = "PUT" @@ -324,15 +328,32 @@ class HttpVerb(enum.Enum): ALL = "*" +DENY_ALL_RESPONSE = { + "principalId": "deny-all-user", + "policyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Action": "execute-api:Invoke", + "Effect": "Deny", + "Resource": ["*"], + } + ], + }, +} + + class APIGatewayAuthorizerResponse: - """Api Gateway HTTP API V1 payload or Rest api authorizer response helper + """The IAM Policy Response required for API Gateway REST APIs and HTTP APIs. Based on: - https://github.com/awslabs/aws-apigateway-lambda-authorizer-blueprints/blob/\ master/blueprints/python/api-gateway-authorizer-python.py - """ - version = "2012-10-17" - """The policy version used for the evaluation. This should always be '2012-10-17'""" + Documentation: + ------------- + - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-lambda-authorizer.html + - https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-lambda-authorizer-output.html + """ path_regex = r"^[/.a-zA-Z0-9-\*]+$" """The regular expression used to validate resource paths for the policy""" @@ -345,6 +366,7 @@ def __init__( api_id: str, stage: str, context: Optional[Dict] = None, + usage_identifier_key: Optional[str] = None, ): """ Parameters @@ -373,6 +395,10 @@ def __init__( context : Dict, optional Optional, context. Note: only names of type string and values of type int, string or boolean are supported + usage_identifier_key: str, optional + If the API uses a usage plan (the apiKeySource is set to `AUTHORIZER`), the Lambda authorizer function + must return one of the usage plan's API keys as the usageIdentifierKey property value. + > **Note:** This only applies for REST APIs. 
""" self.principal_id = principal_id self.region = region @@ -380,25 +406,46 @@ def __init__( self.api_id = api_id self.stage = stage self.context = context + self.usage_identifier_key = usage_identifier_key self._allow_routes: List[Dict] = [] self._deny_routes: List[Dict] = [] + self._resource_pattern = re.compile(self.path_regex) - def _add_route(self, effect: str, verb: str, resource: str, conditions: List[Dict]): + @staticmethod + def from_route_arn( + arn: str, + principal_id: str, + context: Optional[Dict] = None, + usage_identifier_key: Optional[str] = None, + ) -> "APIGatewayAuthorizerResponse": + parsed_arn = parse_api_gateway_arn(arn) + return APIGatewayAuthorizerResponse( + principal_id, + parsed_arn.region, + parsed_arn.aws_account_id, + parsed_arn.api_id, + parsed_arn.stage, + context, + usage_identifier_key, + ) + + def _add_route(self, effect: str, http_method: str, resource: str, conditions: Optional[List[Dict]] = None): """Adds a route to the internal lists of allowed or denied routes. Each object in the internal list contains a resource ARN and a condition statement. The condition statement can be null.""" - if verb != "*" and verb not in HttpVerb.__members__: + if http_method != "*" and http_method not in HttpVerb.__members__: allowed_values = [verb.value for verb in HttpVerb] - raise ValueError(f"Invalid HTTP verb: '{verb}'. Use either '{allowed_values}'") + raise ValueError(f"Invalid HTTP verb: '{http_method}'. Use either '{allowed_values}'") - resource_pattern = re.compile(self.path_regex) - if not resource_pattern.match(resource): + if not self._resource_pattern.match(resource): raise ValueError(f"Invalid resource path: {resource}. Path should match {self.path_regex}") if resource[:1] == "/": resource = resource[1:] - resource_arn = APIGatewayRouteArn(self.region, self.aws_account_id, self.api_id, self.stage, verb, resource).arn + resource_arn = APIGatewayRouteArn( + self.region, self.aws_account_id, self.api_id, self.stage, http_method, resource + ).arn route = {"resourceArn": resource_arn, "conditions": conditions} @@ -412,24 +459,27 @@ def _get_empty_statement(effect: str) -> Dict[str, Any]: """Returns an empty statement object prepopulated with the correct action and the desired effect.""" return {"Action": "execute-api:Invoke", "Effect": effect.capitalize(), "Resource": []} - def _get_statement_for_effect(self, effect: str, methods: List) -> List: - """This function loops over an array of objects containing a resourceArn and - conditions statement and generates the array of statements for the policy.""" - if len(methods) == 0: + def _get_statement_for_effect(self, effect: str, routes: List[Dict]) -> List[Dict]: + """This function loops over an array of objects containing a `resourceArn` and + `conditions` statement and generates the array of statements for the policy.""" + if not routes: return [] - statements = [] - + statements: List[Dict] = [] statement = self._get_empty_statement(effect) - for method in methods: - if method["conditions"] is None or len(method["conditions"]) == 0: - statement["Resource"].append(method["resourceArn"]) - else: + + for route in routes: + resource_arn = route["resourceArn"] + conditions = route.get("conditions") + if conditions is not None and len(conditions) > 0: conditional_statement = self._get_empty_statement(effect) - conditional_statement["Resource"].append(method["resourceArn"]) - conditional_statement["Condition"] = method["conditions"] + conditional_statement["Resource"].append(resource_arn) + 
conditional_statement["Condition"] = conditions statements.append(conditional_statement) + else: + statement["Resource"].append(resource_arn) + if len(statement["Resource"]) > 0: statements.append(statement) @@ -442,7 +492,7 @@ def allow_all_routes(self, http_method: str = HttpVerb.ALL.value): ---------- http_method: str """ - self._add_route(effect="Allow", verb=http_method, resource="*", conditions=[]) + self._add_route(effect="Allow", http_method=http_method, resource="*") def deny_all_routes(self, http_method: str = HttpVerb.ALL.value): """Adds a '*' allow to the policy to deny access to all methods of an API @@ -452,7 +502,7 @@ def deny_all_routes(self, http_method: str = HttpVerb.ALL.value): http_method: str """ - self._add_route(effect="Deny", verb=http_method, resource="*", conditions=[]) + self._add_route(effect="Deny", http_method=http_method, resource="*") def allow_route(self, http_method: str, resource: str, conditions: Optional[List[Dict]] = None): """Adds an API Gateway method (Http verb + Resource path) to the list of allowed @@ -460,8 +510,7 @@ def allow_route(self, http_method: str, resource: str, conditions: Optional[List Optionally includes a condition for the policy statement. More on AWS policy conditions here: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition""" - conditions = conditions or [] - self._add_route(effect="Allow", verb=http_method, resource=resource, conditions=conditions) + self._add_route(effect="Allow", http_method=http_method, resource=resource, conditions=conditions) def deny_route(self, http_method: str, resource: str, conditions: Optional[List[Dict]] = None): """Adds an API Gateway method (Http verb + Resource path) to the list of denied @@ -469,8 +518,7 @@ def deny_route(self, http_method: str, resource: str, conditions: Optional[List[ Optionally includes a condition for the policy statement. More on AWS policy conditions here: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition""" - conditions = conditions or [] - self._add_route(effect="Deny", verb=http_method, resource=resource, conditions=conditions) + self._add_route(effect="Deny", http_method=http_method, resource=resource, conditions=conditions) def asdict(self) -> Dict[str, Any]: """Generates the policy document based on the internal lists of allowed and denied @@ -482,12 +530,15 @@ def asdict(self) -> Dict[str, Any]: response: Dict[str, Any] = { "principalId": self.principal_id, - "policyDocument": {"Version": self.version, "Statement": []}, + "policyDocument": {"Version": "2012-10-17", "Statement": []}, } response["policyDocument"]["Statement"].extend(self._get_statement_for_effect("Allow", self._allow_routes)) response["policyDocument"]["Statement"].extend(self._get_statement_for_effect("Deny", self._deny_routes)) + if self.usage_identifier_key: + response["usageIdentifierKey"] = self.usage_identifier_key + if self.context: response["context"] = self.context diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 1e0b76b018b..e05193c7702 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -96,9 +96,10 @@ Use **`APIGatewayAuthorizerRequestEvent`** for type `REQUEST` and **`APIGatewayA When the user is found, it includes the user details in the request context that will be available to the back-end, and returns a full access policy for admin users. 
- ```python hl_lines="2-5 26-31 36-37 40 44 46" + ```python hl_lines="2-6 29 36-42 47 49" from aws_lambda_powertools.utilities.data_classes import event_source from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( + DENY_ALL_RESPONSE, APIGatewayAuthorizerRequestEvent, APIGatewayAuthorizerResponse, HttpVerb, @@ -108,9 +109,9 @@ Use **`APIGatewayAuthorizerRequestEvent`** for type `REQUEST` and **`APIGatewayA def get_user_by_token(token): if compare_digest(token, "admin-foo"): - return {"isAdmin": True, "name": "Admin"} + return {"id": 0, "name": "Admin", "isAdmin": True} elif compare_digest(token, "regular-foo"): - return {"name": "Joe"} + return {"id": 1, "name": "Joe"} else: return None @@ -119,25 +120,27 @@ Use **`APIGatewayAuthorizerRequestEvent`** for type `REQUEST` and **`APIGatewayA def handler(event: APIGatewayAuthorizerRequestEvent, context): user = get_user_by_token(event.get_header_value("Authorization")) + if user is None: + # No user was found + # to return 401 - `{"message":"Unauthorized"}`, but pollutes lambda error count metrics + # raise Exception("Unauthorized") + # to return 403 - `{"message":"Forbidden"}` + return DENY_ALL_RESPONSE + # parse the `methodArn` as an `APIGatewayRouteArn` arn = event.parsed_arn + # Create the response builder from parts of the `methodArn` + # and set the logged in user id and context policy = APIGatewayAuthorizerResponse( - principal_id="user", + principal_id=user["id"], + context=user, region=arn.region, aws_account_id=arn.aws_account_id, api_id=arn.api_id, - stage=arn.stage + stage=arn.stage, ) - if user is None: - # No user was found, so we return not authorized - policy.deny_all_routes() - return policy.asdict() - - # Found the user and setting the details in the context - policy.context = user - # Conditional IAM Policy if user.get("isAdmin", False): policy.allow_all_routes() diff --git a/tests/functional/data_classes/test_api_gateway_authorizer.py b/tests/functional/data_classes/test_api_gateway_authorizer.py index 7dac6cb7791..b7584ccc4a8 100644 --- a/tests/functional/data_classes/test_api_gateway_authorizer.py +++ b/tests/functional/data_classes/test_api_gateway_authorizer.py @@ -1,6 +1,7 @@ import pytest from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( + DENY_ALL_RESPONSE, APIGatewayAuthorizerResponse, HttpVerb, ) @@ -36,7 +37,8 @@ def test_authorizer_response_invalid_resource(builder: APIGatewayAuthorizerRespo def test_authorizer_response_allow_all_routes_with_context(): - builder = APIGatewayAuthorizerResponse("foo", "us-west-1", "123456789", "fantom", "dev", {"name": "Foo"}) + arn = "arn:aws:execute-api:us-west-1:123456789:fantom/dev/GET/foo" + builder = APIGatewayAuthorizerResponse.from_route_arn(arn, principal_id="foo", context={"name": "Foo"}) builder.allow_all_routes() assert builder.asdict() == { "principalId": "foo", @@ -54,6 +56,26 @@ def test_authorizer_response_allow_all_routes_with_context(): } +def test_authorizer_response_allow_all_routes_with_usage_identifier_key(): + arn = "arn:aws:execute-api:us-east-1:1111111111:api/dev/ANY/y" + builder = APIGatewayAuthorizerResponse.from_route_arn(arn, principal_id="cow", usage_identifier_key="key") + builder.allow_all_routes() + assert builder.asdict() == { + "principalId": "cow", + "policyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Action": "execute-api:Invoke", + "Effect": "Allow", + "Resource": ["arn:aws:execute-api:us-east-1:1111111111:api/dev/*/*"], + } + ], + }, + 
"usageIdentifierKey": "key", + } + + def test_authorizer_response_deny_all_routes(builder: APIGatewayAuthorizerResponse): builder.deny_all_routes() assert builder.asdict() == { @@ -145,3 +167,14 @@ def test_authorizer_response_deny_route_with_conditions(builder: APIGatewayAutho ], }, } + + +def test_deny_all(): + # CHECK we always explicitly deny all + statements = DENY_ALL_RESPONSE["policyDocument"]["Statement"] + assert len(statements) == 1 + assert statements[0] == { + "Action": "execute-api:Invoke", + "Effect": "Deny", + "Resource": ["*"], + } From 4f0ad449528fd22f0f9b7fa704321860aea1729f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Sep 2021 10:55:36 +0000 Subject: [PATCH 05/40] chore(deps): bump boto3 from 1.18.32 to 1.18.38 (#671) Bumps [boto3](https://github.com/boto/boto3) from 1.18.32 to 1.18.38.
Changelog

Sourced from boto3's changelog.

1.18.38

  • api-change:opensearch: [botocore] Updated Configuration APIs for Amazon OpenSearch Service (successor to Amazon Elasticsearch Service)
  • api-change:ram: [botocore] A minor text-only update that fixes several customer issues.
  • api-change:kafka: [botocore] Amazon MSK has added a new API that allows you to update the encrypting and authentication settings for an existing cluster.

1.18.37

  • api-change:elasticache: [botocore] Doc only update for ElastiCache
  • api-change:amp: [botocore] This release adds tagging support for Amazon Managed Service for Prometheus workspace.
  • api-change:forecast: [botocore] Predictor creation now supports selecting an accuracy metric to optimize in AutoML and hyperparameter optimization. This release adds additional accuracy metrics for predictors - AverageWeightedQuantileLoss, MAPE and MASE.
  • api-change:xray: [botocore] Updated references to AWS KMS keys and customer managed keys to reflect current terminology.
  • api-change:ssm-contacts: [botocore] Added SDK examples for SSM-Contacts.
  • api-change:mediapackage: [botocore] SPEKE v2 support for live CMAF packaging type. SPEKE v2 is an upgrade to the existing SPEKE API to support multiple encryption keys, it supports live DASH currently.
  • api-change:eks: [botocore] Adding RegisterCluster and DeregisterCluster operations, to support connecting external clusters to EKS.

1.18.36

  • api-change:chime-sdk-identity: [botocore] Documentation updates for Chime
  • api-change:chime-sdk-messaging: [botocore] Documentation updates for Chime
  • api-change:outposts: [botocore] This release adds a new API CreateOrder.
  • api-change:frauddetector: [botocore] Enhanced GetEventPrediction API response to include risk scores from imported SageMaker models
  • api-change:codeguru-reviewer: [botocore] Added support for CodeInconsistencies detectors

1.18.35

  • api-change:acm-pca: [botocore] Private Certificate Authority Service now allows customers to enable an online certificate status protocol (OCSP) responder service on their private certificate authorities. Customers can also optionally configure a custom CNAME for their OCSP responder.
  • api-change:s3control: [botocore] S3 Multi-Region Access Points provide a single global endpoint to access a data set that spans multiple S3 buckets in different AWS Regions.
  • api-change:accessanalyzer: [botocore] Updates service API, documentation, and paginators to support multi-region access points from Amazon S3.
  • api-change:schemas: [botocore] This update include the support for Schema Discoverer to discover the events sent to the bus from another account. The feature will be enabled by default when discoverer is created or updated but can also be opt-in or opt-out by specifying the value for crossAccount.
  • api-change:securityhub: [botocore] New ASFF Resources: AwsAutoScalingLaunchConfiguration, AwsEc2VpnConnection, AwsEcrContainerImage. Added KeyRotationStatus to AwsKmsKey. Added AccessControlList, BucketLoggingConfiguration,BucketNotificationConfiguration and BucketNotificationConfiguration to AwsS3Bucket.
  • enhancement:s3: [botocore] Added support for S3 Multi-Region Access Points
  • api-change:efs: [botocore] Update efs client to latest version
  • api-change:transfer: [botocore] AWS Transfer Family introduces Managed Workflows for creating, executing, monitoring, and standardizing post file transfer processing
  • api-change:ebs: [botocore] Documentation updates for Amazon EBS direct APIs.
  • api-change:quicksight: [botocore] This release adds support for referencing parent datasets as sources in a child dataset.
  • api-change:fsx: [botocore] Announcing Amazon FSx for NetApp ONTAP, a new service that provides fully managed shared storage in the AWS Cloud with the data access and management capabilities of ONTAP.
  • enhancement:Signers: [botocore] Added support for Sigv4a Signing Algorithm
  • api-change:lex-models: [botocore] Lex now supports Korean (ko-KR) locale.

1.18.34

... (truncated)

Commits
  • 884c0c5 Merge branch 'release-1.18.38'
  • ca64d44 Bumping version to 1.18.38
  • 9c4aff7 Add changelog entries from botocore
  • 9cf4273 Merge branch 'release-1.18.37'
  • 91f947a Merge branch 'release-1.18.37' into develop
  • 01b9d30 Bumping version to 1.18.37
  • d48b72f Add changelog entries from botocore
  • 96f9e92 Merge branch 'release-1.18.36'
  • 7a2ffaa Merge branch 'release-1.18.36' into develop
  • 1961d9d Bumping version to 1.18.36
  • Additional commits viewable in compare view

--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1d312ace6d..ce41e735448 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,14 +81,14 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.18.32" +version = "1.18.38" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.32,<1.22.0" +botocore = ">=1.21.38,<1.22.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -97,7 +97,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.32" +version = "1.21.38" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1086,12 +1086,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.18.32-py3-none-any.whl", hash = "sha256:a299d0c6b5a30dc2e823944286ec782aec415d83965a51f97fc9a779a04ff194"}, - {file = "boto3-1.18.32.tar.gz", hash = "sha256:f4b17a2b6e04e5ec6f494e643d05b06dd60c88943f33d6f9650dd9e7f89a7022"}, + {file = "boto3-1.18.38-py3-none-any.whl", hash = "sha256:a7d831c65e0216ca5f1b06dbb6d8441e8f3926a7a535677bd257fed481cd2f7a"}, + {file = "boto3-1.18.38.tar.gz", hash = "sha256:0d576a1b1288825a8ecac62e4eec0c4f6679c117e05575e7e0f66eb2f010450d"}, ] botocore = [ - {file = "botocore-1.21.32-py3-none-any.whl", hash = "sha256:5803bf852304a301de41dccc3c0431053354144f3aefc7571dbe240a4288d3c5"}, - {file = "botocore-1.21.32.tar.gz", hash = "sha256:95ff61534b2a423d0e70067c39615e4e70c119773d2180d7254bf4025c54396d"}, + {file = "botocore-1.21.38-py3-none-any.whl", hash = "sha256:beefe7dee5020e1f7cda84685131533324742529d8a5686ce959047e1b3e3928"}, + {file = "botocore-1.21.38.tar.gz", hash = "sha256:5171b7db1c3346dd687ac0a195f69538c05ee0c2c26510de2019d0a0949297bf"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From 00c66b7dbbc32451053eead10a39607a2ba27c5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Sep 2021 17:21:51 -0300 Subject: [PATCH 06/40] chore(deps-dev): bump radon from 4.5.2 to 5.1.0 (#673) --- poetry.lock | 24 ++++-------------------- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/poetry.lock b/poetry.lock index ce41e735448..67287ba1cb1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -333,17 +333,6 @@ testfixtures = ">=6.8.0,<7" [package.extras] test = ["pytest (>=4.0.2,<6)", "toml"] -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -flake8 = "*" - [[package]] name = "flake8-variables-names" version = "0.0.4" @@ -856,7 +845,7 @@ pyyaml = "*" [[package]] name = "radon" -version = "4.5.2" +version = "5.1.0" description = "Code Metrics in Python" category = "dev" optional = false @@ -864,7 +853,6 @@ python-versions = "*" [package.dependencies] colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} -flake8-polyfill = "*" future = "*" mando = ">=0.6,<0.7" @@ -1059,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "3716d65cd1018286bba842e20603d4793dcbb0119a4c6a9b5542166dfa3261d0" 
+content-hash = "d689c0fca384782c305b50cacab5039f4cb26f6f0d662650abcea198b2b41a36" [metadata.files] appdirs = [ @@ -1218,10 +1206,6 @@ flake8-isort = [ {file = "flake8-isort-4.0.0.tar.gz", hash = "sha256:2b91300f4f1926b396c2c90185844eb1a3d5ec39ea6138832d119da0a208f4d9"}, {file = "flake8_isort-4.0.0-py2.py3-none-any.whl", hash = "sha256:729cd6ef9ba3659512dee337687c05d79c78e1215fdf921ed67e5fe46cce2f3c"}, ] -flake8-polyfill = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] flake8-variables-names = [ {file = "flake8_variables_names-0.0.4.tar.gz", hash = "sha256:d6fa0571a807c72940b5773827c5760421ea6f8206595ff0a8ecfa01e42bf2cf"}, ] @@ -1511,8 +1495,8 @@ pyyaml-env-tag = [ {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] radon = [ - {file = "radon-4.5.2-py2.py3-none-any.whl", hash = "sha256:0fc191bfb6938e67f881764f7242c163fb3c78fc7acdfc5a0b8254c66ff9dc8b"}, - {file = "radon-4.5.2.tar.gz", hash = "sha256:63b863dd294fcc86f6aecace8d7cb4228acc2a16ab0b89c11ff60cb14182b488"}, + {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, + {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, ] regex = [ {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, diff --git a/pyproject.toml b/pyproject.toml index 4c9c4738d89..02e7d665156 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pytest-mock = "^3.5.1" pdoc3 = "^0.10.0" pytest-asyncio = "^0.15.1" bandit = "^1.7.0" -radon = "^4.5.0" +radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.3.2" From 180ff34388403faafb1858ddf366c2024c0bb7f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Sep 2021 22:00:05 +0000 Subject: [PATCH 07/40] chore(deps): bump boto3 from 1.18.38 to 1.18.41 (#677) Bumps [boto3](https://github.com/boto/boto3) from 1.18.38 to 1.18.41.
Changelog

Sourced from boto3's changelog.

1.18.41

  • api-change:iot: [botocore] AWS IoT Rules Engine adds OpenSearch action. The OpenSearch rule action lets you stream data from IoT sensors and applications to Amazon OpenSearch Service which is a successor to Amazon Elasticsearch Service.
  • api-change:ec2: [botocore] Adds support for T3 instances on Amazon EC2 Dedicated Hosts.
  • enhancement:Tagged Unions: [botocore] Introducing support for the union trait on structures in request and response objects.

1.18.40

  • api-change:cloudformation: [botocore] Doc only update for CloudFormation that fixes several customer-reported issues.
  • api-change:rds: [botocore] This release adds support for providing a custom timeout value for finding a scaling point during autoscaling in Aurora Serverless v1.
  • api-change:ecr: [botocore] This release updates terminology around KMS keys.
  • api-change:sagemaker: [botocore] This release adds support for "Lifecycle Configurations" to SageMaker Studio
  • api-change:transcribe: [botocore] This release adds an API option for startTranscriptionJob and startMedicalTranscriptionJob that allows the user to specify encryption context key value pairs for batch jobs.
  • api-change:quicksight: [botocore] Add new data source type for Amazon OpenSearch (successor to Amazon ElasticSearch).

1.18.39

  • api-change:emr: [botocore] Update emr client to latest version
  • api-change:codeguru-reviewer: [botocore] The Amazon CodeGuru Reviewer API now includes the RuleMetadata data object and a Severity attribute on a RecommendationSummary object. A RuleMetadata object contains information about a rule that generates a recommendation. Severity indicates how severe the issue associated with a recommendation is.
  • api-change:lookoutequipment: [botocore] Added OffCondition parameter to CreateModel API
Commits
  • 4ef0671 Merge branch 'release-1.18.41'
  • 21855ed Bumping version to 1.18.41
  • a1f9dfd Add changelog entries from botocore
  • 7f827c4 Merge branch 'release-1.18.40'
  • 7b11ddb Merge branch 'release-1.18.40' into develop
  • d110470 Bumping version to 1.18.40
  • fdd3f7e Add changelog entries from botocore
  • 29e38a0 Merge branch 'release-1.18.39'
  • 496f34e Merge branch 'release-1.18.39' into develop
  • c53d61d Bumping version to 1.18.39
  • Additional commits viewable in compare view

--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 67287ba1cb1..8051928fdb1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,14 +81,14 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.18.38" +version = "1.18.41" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.38,<1.22.0" +botocore = ">=1.21.41,<1.22.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -97,7 +97,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.38" +version = "1.21.41" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1074,12 +1074,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.18.38-py3-none-any.whl", hash = "sha256:a7d831c65e0216ca5f1b06dbb6d8441e8f3926a7a535677bd257fed481cd2f7a"}, - {file = "boto3-1.18.38.tar.gz", hash = "sha256:0d576a1b1288825a8ecac62e4eec0c4f6679c117e05575e7e0f66eb2f010450d"}, + {file = "boto3-1.18.41-py3-none-any.whl", hash = "sha256:aaa6ba286d92fb03f27dd619220c6c1de2c010f39cac7afa72f505f073a31db1"}, + {file = "boto3-1.18.41.tar.gz", hash = "sha256:44f73009506dba227e0d421e4fc44a863d8ff315aaa47d9a7be6c549a6a88a12"}, ] botocore = [ - {file = "botocore-1.21.38-py3-none-any.whl", hash = "sha256:beefe7dee5020e1f7cda84685131533324742529d8a5686ce959047e1b3e3928"}, - {file = "botocore-1.21.38.tar.gz", hash = "sha256:5171b7db1c3346dd687ac0a195f69538c05ee0c2c26510de2019d0a0949297bf"}, + {file = "botocore-1.21.41-py3-none-any.whl", hash = "sha256:efad68a52ee2d939618e0fcb3da0a46dff10cb2e0e128c1e2749bbfc58953a12"}, + {file = "botocore-1.21.41.tar.gz", hash = "sha256:b877f9175843939db6fde3864ffc47611863710b85dc0336bb2433e921dc8790"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From 505891d23c4bdfbdcecbb93e7060d39ec6eb0725 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Sep 2021 22:00:56 +0000 Subject: [PATCH 08/40] chore(deps-dev): bump flake8-bugbear from 21.4.3 to 21.9.1 (#676) Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 21.4.3 to 21.9.1.
Release notes

Sourced from flake8-bugbear's releases.

21.9.1

  • Update B008: Whitelist more immutable function calls (#173)
  • Remove Python Compatibility Warnings (#182)
  • Add B904: check for raise without from in an except clause (#181)
  • Add Python 3.10 tests to ensure we pass (#183)
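The B904 check added above flags raising a new exception inside an `except` block without `from`; re-raising with `from` keeps the original exception chained as `__cause__`. A small sketch (the `ConfigError` and `read_timeout` names are made up for illustration):

```python
class ConfigError(Exception):
    """Hypothetical domain error, defined only for this example."""


def read_timeout(settings: dict) -> int:
    try:
        return int(settings["timeout"])
    except KeyError as exc:
        # Chaining with "from" satisfies B904; a plain `raise ConfigError(...)`
        # here would be reported because the original KeyError would only be
        # carried along implicitly.
        raise ConfigError("missing required 'timeout' setting") from exc
```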
--- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8051928fdb1..3825ae847fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -244,7 +244,7 @@ toml = "*" [[package]] name = "flake8-bugbear" -version = "21.4.3" +version = "21.9.1" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -1047,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "d689c0fca384782c305b50cacab5039f4cb26f6f0d662650abcea198b2b41a36" +content-hash = "e606dc6ff5241ca36361c01511a4780cc59ad98365ff221669057c576ce12262" [metadata.files] appdirs = [ @@ -1179,8 +1179,8 @@ flake8-black = [ {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-21.4.3.tar.gz", hash = "sha256:2346c81f889955b39e4a368eb7d508de723d9de05716c287dc860a4073dc57e7"}, - {file = "flake8_bugbear-21.4.3-py36.py37.py38-none-any.whl", hash = "sha256:4f305dca96be62bf732a218fe6f1825472a621d3452c5b994d8f89dae21dbafa"}, + {file = "flake8-bugbear-21.9.1.tar.gz", hash = "sha256:2f60c8ce0dc53d51da119faab2d67dea978227f0f92ed3c44eb7d65fb2e06a96"}, + {file = "flake8_bugbear-21.9.1-py36.py37.py38-none-any.whl", hash = "sha256:45bfdccfb9f2d8aa140e33cac8f46f1e38215c13d5aa8650e7e188d84e2f94c6"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, diff --git a/pyproject.toml b/pyproject.toml index 02e7d665156..777314dbe53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ bandit = "^1.7.0" radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" -flake8-bugbear = "^21.3.2" +flake8-bugbear = "^21.9.1" mkdocs-material = "^7.2.6" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" From 79414c8164e839cdfe1839c2c9f0d590df33f4b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Sep 2021 21:31:32 +0000 Subject: [PATCH 09/40] chore(deps-dev): bump mkdocs-material from 7.2.6 to 7.2.8 (#682) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.2.6 to 7.2.8.
Release notes

Sourced from mkdocs-material's releases.

mkdocs-material-7.2.8

  • Fixed #3039: Search modal overlays menu on mobile (7.2.7 regression)

mkdocs-material-7.2.7

  • Updated Serbian and Serbo-Croatian translations
  • Improved appearance of outline on details
  • Fixed #2934: Scrollbar when header is hidden on some mobile browsers
  • Fixed #3032: Anchor in details doesn't open on load (7.0.0 regression)
  • Fixed back-to-top button being focusable when invisible
  • Fixed broken admonition icons (removed in upstream)
Changelog

Sourced from mkdocs-material's changelog.

7.2.8 _ September 20, 2021

  • Fixed #3039: Search modal overlays menu on mobile (7.2.7 regression)

7.2.7 _ September 19, 2021

  • Updated Serbian and Serbo-Croatian translations
  • Improved appearance of outline on details
  • Fixed #2934: Scrollbar when header is hidden on some mobile browsers
  • Fixed #3032: Anchor in details doesn't open on load (7.0.0 regression)
  • Fixed back-to-top button being focusable when invisible
  • Fixed broken admonition icons (removed in upstream)
Commits
  • 166096d Prepare 7.2.8 release
  • 2eabc94 Fixed search modal overlaying menu on mobile (7.2.7 regression)
  • ea130d1 Added documentation for third-party Docker image
  • 96cc7b6 Updated Insiders changelog
  • e580eb2 Updated Insiders changelog
  • 7c0a251 Prepare 7.2.7 release
  • 36f1e99 Updated dependencies
  • 7495589 Added distribution files
  • 090d0ed Improved Serbian translations
  • d678678 Fixed scrollbar on mobile when header is hidden on some browsers
  • Additional commits viewable in compare view

--- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3825ae847fb..ccc0866a789 100644 --- a/poetry.lock +++ b/poetry.lock @@ -577,7 +577,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.2.6" +version = "7.2.8" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -1047,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "e606dc6ff5241ca36361c01511a4780cc59ad98365ff221669057c576ce12262" +content-hash = "2d7e2681470b4f5d561759c6234d25ed6fa21fb5628ec5d04e86902dcd2d7a7f" [metadata.files] appdirs = [ @@ -1336,8 +1336,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.2.6.tar.gz", hash = "sha256:4bdeff63904680865676ceb3193216934de0b33fa5b2446e0a84ade60929ee54"}, - {file = "mkdocs_material-7.2.6-py2.py3-none-any.whl", hash = "sha256:4c6939b9d7d5c6db948ab02df8525c64211828ddf33286acea8b9d2115cec369"}, + {file = "mkdocs-material-7.2.8.tar.gz", hash = "sha256:b9a3d1cda28310acf74842a227fedc90d26e407742d3d7ffc0cdcf0560d22ed3"}, + {file = "mkdocs_material-7.2.8-py2.py3-none-any.whl", hash = "sha256:ffa1ccde3f58b955d65a3420a18c7739178722150790bbd1b08393b0b81ada55"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, diff --git a/pyproject.toml b/pyproject.toml index 777314dbe53..72f16082ccb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.9.1" -mkdocs-material = "^7.2.6" +mkdocs-material = "^7.2.8" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" mypy = "^0.910" From 4ab9f49709b6f094c9cd9eaca1855206dc037095 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 23 Sep 2021 20:17:20 +0000 Subject: [PATCH 10/40] chore(deps-dev): bump mkdocs-material from 7.2.8 to 7.3.0 (#695) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.2.8 to 7.3.0.
Release notes

Sourced from mkdocs-material's releases.

mkdocs-material-7.3.0

  • Added support for sticky navigation tabs
  • Added support for section index pages
  • Added support for removing generator notice
Changelog

Sourced from mkdocs-material's changelog.

7.3.0 - September 23, 2021

  • Added support for sticky navigation tabs
  • Added support for section index pages
  • Added support for removing generator notice
Commits
  • ba1f021 Prepare 7.3.0 release
  • 9d8ebcd Added missing distribution files
  • 377b4d6 Merge of Insiders features tied to 'Caribbean Red' funding goal
  • 62742b4 Formatting
  • 8fa40fa Updated documentation for language overrides
  • 174cb04 docs: clarify overriding of translations for non-English language
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mkdocs-material&package-manager=pip&previous-version=7.2.8&new-version=7.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
--- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ccc0866a789..22784e5754d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -577,7 +577,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.2.8" +version = "7.3.0" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -1047,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "2d7e2681470b4f5d561759c6234d25ed6fa21fb5628ec5d04e86902dcd2d7a7f" +content-hash = "ddec4b961b63336adb798a590c98df41a874da5ee4b0e37993c3c3a0437dc839" [metadata.files] appdirs = [ @@ -1336,8 +1336,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.2.8.tar.gz", hash = "sha256:b9a3d1cda28310acf74842a227fedc90d26e407742d3d7ffc0cdcf0560d22ed3"}, - {file = "mkdocs_material-7.2.8-py2.py3-none-any.whl", hash = "sha256:ffa1ccde3f58b955d65a3420a18c7739178722150790bbd1b08393b0b81ada55"}, + {file = "mkdocs-material-7.3.0.tar.gz", hash = "sha256:07db0580fa96c3473aee99ec3fb4606a1a5a1e4f4467e64c0cd1ba8da5b6476e"}, + {file = "mkdocs_material-7.3.0-py2.py3-none-any.whl", hash = "sha256:b183c27dc0f44e631bbc32c51057f61a3e2ba8b3c1080e59f944167eeba9ff1d"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, diff --git a/pyproject.toml b/pyproject.toml index 72f16082ccb..c5197c626ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.9.1" -mkdocs-material = "^7.2.8" +mkdocs-material = "^7.3.0" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" mypy = "^0.910" From e24eb0209a0ce0a2c805507dd3d66408770c3c21 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Thu, 23 Sep 2021 14:40:48 -0700 Subject: [PATCH 11/40] feat(validator): include missing data elements from a validation error (#686) --- .../utilities/validation/base.py | 15 ++++-- .../utilities/validation/exceptions.py | 50 +++++++++++++++++++ tests/functional/validator/test_validator.py | 20 +++++++- 3 files changed, 80 insertions(+), 5 deletions(-) diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py index 13deb4d24e2..2a337b85971 100644 --- a/aws_lambda_powertools/utilities/validation/base.py +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -32,6 +32,15 @@ def validate_data_against_schema(data: Union[Dict, str], schema: Dict, formats: fastjsonschema.validate(definition=schema, data=data, formats=formats) except (TypeError, AttributeError, fastjsonschema.JsonSchemaDefinitionException) as e: raise InvalidSchemaFormatError(f"Schema received: {schema}, Formats: {formats}. Error: {e}") - except fastjsonschema.JsonSchemaException as e: - message = f"Failed schema validation. Error: {e.message}, Path: {e.path}, Data: {e.value}" # noqa: B306, E501 - raise SchemaValidationError(message) + except fastjsonschema.JsonSchemaValueException as e: + message = f"Failed schema validation. 
Error: {e.message}, Path: {e.path}, Data: {e.value}" + raise SchemaValidationError( + message, + validation_message=e.message, + name=e.name, + path=e.path, + value=e.value, + definition=e.definition, + rule=e.rule, + rule_definition=e.rule_definition, + ) diff --git a/aws_lambda_powertools/utilities/validation/exceptions.py b/aws_lambda_powertools/utilities/validation/exceptions.py index d4aaa500ec7..7c719ca3119 100644 --- a/aws_lambda_powertools/utilities/validation/exceptions.py +++ b/aws_lambda_powertools/utilities/validation/exceptions.py @@ -1,9 +1,59 @@ +from typing import Any, List, Optional + from ...exceptions import InvalidEnvelopeExpressionError class SchemaValidationError(Exception): """When serialization fail schema validation""" + def __init__( + self, + message: str, + validation_message: Optional[str] = None, + name: Optional[str] = None, + path: Optional[List] = None, + value: Optional[Any] = None, + definition: Optional[Any] = None, + rule: Optional[str] = None, + rule_definition: Optional[Any] = None, + ): + """ + + Parameters + ---------- + message : str + Powertools formatted error message + validation_message : str, optional + Containing human-readable information what is wrong + (e.g. `data.property[index] must be smaller than or equal to 42`) + name : str, optional + name of a path in the data structure + (e.g. `data.property[index]`) + path: List, optional + `path` as an array in the data structure + (e.g. `['data', 'property', 'index']`), + value : Any, optional + The invalid value + definition : Any, optional + The full rule `definition` + (e.g. `42`) + rule : str, optional + `rule` which the `data` is breaking + (e.g. `maximum`) + rule_definition : Any, optional + The specific rule `definition` + (e.g. `42`) + """ + super().__init__(message) + self.message = message + self.validation_message = validation_message + self.name = name + self.path = path + self.value = value + self.definition = definition + self.rule = rule + self.rule_definition = rule_definition + class InvalidSchemaFormatError(Exception): """When JSON Schema is in invalid format""" diff --git a/tests/functional/validator/test_validator.py b/tests/functional/validator/test_validator.py index d8986ba90de..cd5c4168f56 100644 --- a/tests/functional/validator/test_validator.py +++ b/tests/functional/validator/test_validator.py @@ -1,3 +1,5 @@ +import re + import jmespath import pytest from jmespath import functions @@ -22,8 +24,22 @@ def test_validate_base64_string_envelope(schema, wrapped_event_base64_json_strin def test_validate_event_does_not_conform_with_schema(schema): - with pytest.raises(exceptions.SchemaValidationError): - validate(event={"message": "hello_world"}, schema=schema) + data = {"message": "hello_world"} + message = "data must contain ['message', 'username'] properties" + with pytest.raises( + exceptions.SchemaValidationError, + match=re.escape(f"Failed schema validation. 
Error: {message}, Path: ['data'], Data: {data}"), + ) as e: + validate(event=data, schema=schema) + + assert str(e.value) == e.value.message + assert e.value.validation_message == message + assert e.value.name == "data" + assert e.value.path is not None + assert e.value.value == data + assert e.value.definition == schema + assert e.value.rule == "required" + assert e.value.rule_definition == schema.get("required") def test_validate_json_string_no_envelope(schema, wrapped_event_json_string): From b4737a8b139aed01d36a7768d7946777041ce4e3 Mon Sep 17 00:00:00 2001 From: Adam Tankanow Date: Fri, 24 Sep 2021 07:17:15 -0400 Subject: [PATCH 12/40] ISSUE-693: Use ExpressionAttributeNames in _put_record (#697) --- .../utilities/idempotency/persistence/dynamodb.py | 3 ++- tests/functional/idempotency/conftest.py | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index ae3a1be490f..73f241bd613 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -121,7 +121,8 @@ def _put_record(self, data_record: DataRecord) -> None: logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}") self.table.put_item( Item=item, - ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now", + ConditionExpression="attribute_not_exists(#id) OR #now < :now", + ExpressionAttributeNames={"#id": self.key_attr, "#now": self.expiry_attr}, ExpressionAttributeValues={":now": int(now.timestamp())}, ) except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index 2c528cafc50..f563b4bbcda 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -122,7 +122,8 @@ def expected_params_update_item_with_validation( @pytest.fixture def expected_params_put_item(hashed_idempotency_key): return { - "ConditionExpression": "attribute_not_exists(id) OR expiration < :now", + "ConditionExpression": "attribute_not_exists(#id) OR #now < :now", + "ExpressionAttributeNames": {"#id": "id", "#now": "expiration"}, "ExpressionAttributeValues": {":now": stub.ANY}, "Item": {"expiration": stub.ANY, "id": hashed_idempotency_key, "status": "INPROGRESS"}, "TableName": "TEST_TABLE", @@ -132,7 +133,8 @@ def expected_params_put_item(hashed_idempotency_key): @pytest.fixture def expected_params_put_item_with_validation(hashed_idempotency_key, hashed_validation_key): return { - "ConditionExpression": "attribute_not_exists(id) OR expiration < :now", + "ConditionExpression": "attribute_not_exists(#id) OR #now < :now", + "ExpressionAttributeNames": {"#id": "id", "#now": "expiration"}, "ExpressionAttributeValues": {":now": stub.ANY}, "Item": { "expiration": stub.ANY, From a4c3fd108b4e3fb222e9ad65be812a49faf39036 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 27 Sep 2021 09:09:57 -0300 Subject: [PATCH 13/40] docs: add team behind it and email --- README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 46a3671f93b..893c50f378b 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,11 @@ A suite of Python utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. 
([AWS Lambda Powertools Java](https://github.com/awslabs/aws-lambda-powertools-java) is also available). + + **[📜Documentation](https://awslabs.github.io/aws-lambda-powertools-python/)** | **[🐍PyPi](https://pypi.org/project/aws-lambda-powertools/)** | **[Roadmap](https://github.com/awslabs/aws-lambda-powertools-roadmap/projects/1)** | **[Quick hello world example](https://github.com/aws-samples/cookiecutter-aws-sam-python)** | **[Detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/)** -> **Join us on the AWS Developers Slack at `#lambda-powertools`** - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-gu30gquv-EhwIYq3kHhhysaZ2aIX7ew)** +> **An AWS Developer Acceleration (DevAx) initiative by Specialist Solution Architects | aws-devax-open-source@amazon.com** ## Features @@ -42,6 +44,12 @@ With [pip](https://pip.pypa.io/en/latest/index.html) installed, run: ``pip insta * Structured logging initial implementation from [aws-lambda-logging](https://gitlab.com/hadrien/aws_lambda_logging) * Powertools idea [DAZN Powertools](https://github.com/getndazn/dazn-lambda-powertools/) + +## Connect + +* **AWS Developers Slack**: `#lambda-powertools`** - **[Invite, if you don't have an account](https://join.slack.com/t/awsdevelopers/shared_invite/zt-gu30gquv-EhwIYq3kHhhysaZ2aIX7ew)** +* **Email**: aws-lambda-powertools-feedback@amazon.com + ## License This library is licensed under the MIT-0 License. See the LICENSE file. From cec2f012ea6adff02518c4f26956ada76277af11 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 27 Sep 2021 14:21:19 +0200 Subject: [PATCH 14/40] chore: add python 3.9 support --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 893c50f378b..c4778595366 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ![Build](https://github.com/awslabs/aws-lambda-powertools/workflows/Powertools%20Python/badge.svg?branch=master) [![codecov.io](https://codecov.io/github/awslabs/aws-lambda-powertools-python/branch/develop/graphs/badge.svg)](https://app.codecov.io/gh/awslabs/aws-lambda-powertools-python) -![PythonSupport](https://img.shields.io/static/v1?label=python&message=3.6%20|%203.7|%203.8&color=blue?style=flat-square&logo=python) ![PyPI version](https://badge.fury.io/py/aws-lambda-powertools.svg) ![PyPi monthly downloads](https://img.shields.io/pypi/dm/aws-lambda-powertools) +![PythonSupport](https://img.shields.io/static/v1?label=python&message=3.6%20|%203.7|%203.8|%203.9&color=blue?style=flat-square&logo=python) ![PyPI version](https://badge.fury.io/py/aws-lambda-powertools.svg) ![PyPi monthly downloads](https://img.shields.io/pypi/dm/aws-lambda-powertools) A suite of Python utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, and more. ([AWS Lambda Powertools Java](https://github.com/awslabs/aws-lambda-powertools-java) is also available). 
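A quick illustration of the richer `SchemaValidationError` introduced in #686 above: the exception now exposes the failing element, rule, and value as attributes instead of only a formatted string. The handler and schema below are illustrative only and not part of any patch in this series, but the `validate` call and the attribute names match the code added in that patch.

```python
from aws_lambda_powertools.utilities.validation import validate
from aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError

# Hypothetical schema, used only for this sketch
SCHEMA = {
    "type": "object",
    "properties": {"message": {"type": "string"}, "username": {"type": "string"}},
    "required": ["message", "username"],
}


def handle(event: dict) -> dict:
    try:
        validate(event=event, schema=SCHEMA)
    except SchemaValidationError as exc:
        # New in #686: structured details instead of a single formatted message
        return {
            "error": exc.validation_message,  # e.g. "data must contain ['message', 'username'] properties"
            "name": exc.name,                 # e.g. "data"
            "path": exc.path,                 # e.g. ["data"]
            "rule": exc.rule,                 # e.g. "required"
            "rule_definition": exc.rule_definition,
        }
    return {"status": "valid"}
```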
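Similarly, the idempotency fix in #697 above swaps raw attribute names for `ExpressionAttributeNames` placeholders in the conditional put, so a configured key or expiry attribute that collides with a DynamoDB reserved word can no longer break the expression. Below is a standalone sketch of the same pattern with plain boto3; the table name and attribute names are placeholders, not values from this repository.

```python
import time

import boto3
from botocore.exceptions import ClientError

table = boto3.resource("dynamodb").Table("idempotency-example")  # placeholder table name


def put_if_absent_or_expired(item: dict, key_attr: str = "id", expiry_attr: str = "expiration") -> bool:
    """Return True if the record was stored, False if a live record already exists."""
    try:
        table.put_item(
            Item=item,
            # The #id/#now placeholders keep the expression valid even when the
            # configured attribute names are DynamoDB reserved words
            ConditionExpression="attribute_not_exists(#id) OR #now < :now",
            ExpressionAttributeNames={"#id": key_attr, "#now": expiry_attr},
            ExpressionAttributeValues={":now": int(time.time())},
        )
        return True
    except ClientError as exc:
        if exc.response["Error"]["Code"] == "ConditionalCheckFailedException":
            return False
        raise
```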
From 977162a862d1baeb19c19f569bd2e18b38921422 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 28 Sep 2021 10:00:02 +0200 Subject: [PATCH 15/40] docs(event-handler): document catch-all routes (#705) --- docs/core/event_handler/api_gateway.md | 38 ++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 76a72fd03c7..aeaa75e0d2a 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -287,6 +287,8 @@ You can use `/path/{dynamic_value}` when configuring dynamic URL paths. This all } ``` +#### Nested routes + You can also nest paths as configured earlier in [our sample infrastructure](#required-resources): `/{message}/{name}`. === "app.py" @@ -323,6 +325,42 @@ You can also nest paths as configured earlier in [our sample infrastructure](#re } ``` +#### Catch-all routes + +!!! note "We recommend having explicit routes whenever possible; use catch-all routes sparingly" + +You can use a regex string to handle an arbitrary number of paths within a request, for example `.+`. + +You can also combine nested paths with greedy regex to catch in between routes. + +!!! warning "We will choose the more explicit registered route that match incoming event" + +=== "app.py" + + ```python hl_lines="5" + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + + app = ApiGatewayResolver() + + @app.get(".+") + def catch_any_route_after_any(): + return {"path_received": app.current_event.path} + + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + +=== "sample_request.json" + + ```json + { + "resource": "/any/route/should/work", + "path": "/any/route/should/work", + "httpMethod": "GET", + ... + } + ``` + ### Accessing request details By integrating with [Data classes utilities](../../utilities/data_classes.md){target="_blank"}, you have access to request details, Lambda context and also some convenient methods. From 204eb98526fe162253746bb55aff46098c36ee42 Mon Sep 17 00:00:00 2001 From: walmsles <2704782+walmsles@users.noreply.github.com> Date: Tue, 28 Sep 2021 18:46:01 +1000 Subject: [PATCH 16/40] docs(idempotency): fix misleading idempotent examples (#661) Co-authored-by: Tom McCarthy --- docs/utilities/idempotency.md | 10 +- docs/utilities/jmespath_functions.md | 166 +++++++++++++++++++++++++++ docs/utilities/validation.md | 124 +------------------- mkdocs.yml | 1 + 4 files changed, 176 insertions(+), 125 deletions(-) create mode 100644 docs/utilities/jmespath_functions.md diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 495fe626d4f..a9a5a129e63 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -206,6 +206,11 @@ In this example, we have a Lambda handler that creates a payment for a user subs Imagine the function executes successfully, but the client never receives the response due to a connection issue. It is safe to retry in this instance, as the idempotent decorator will return a previously saved response. +!!! warning "Idempotency for JSON payloads" + The payload extracted by the `event_key_jmespath` is treated as a string by default, so will be sensitive to differences in whitespace even when the JSON payload itself is identical. + + To alter this behaviour, we can use the [JMESPath built-in function](/utilities/jmespath_functions) *powertools_json()* to treat the payload as a JSON object rather than a string. 
+ === "payment.py" ```python hl_lines="2-4 10 12 15 20" @@ -218,7 +223,7 @@ Imagine the function executes successfully, but the client never receives the re # Treat everything under the "body" key # in the event json object as our payload - config = IdempotencyConfig(event_key_jmespath="body") + config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") @idempotent(config=config, persistence_store=persistence_layer) def handler(event, context): @@ -270,6 +275,7 @@ Imagine the function executes successfully, but the client never receives the re } ``` + ### Idempotency request flow This sequence diagram shows an example flow of what happens in the payment scenario: @@ -334,7 +340,7 @@ Idempotent decorator can be further configured with **`IdempotencyConfig`** as s Parameter | Default | Description ------------------------------------------------- | ------------------------------------------------- | --------------------------------------------------------------------------------- -**event_key_jmespath** | `""` | JMESPath expression to extract the idempotency key from the event record +**event_key_jmespath** | `""` | JMESPath expression to extract the idempotency key from the event record using [built-in functions](/utilities/jmespath_functions) **payload_validation_jmespath** | `""` | JMESPath expression to validate whether certain parameters have changed in the event while the event payload **raise_on_no_idempotency_key** | `False` | Raise exception if no idempotency key was found in the request **expires_after_seconds** | 3600 | The number of seconds to wait before a record is expired diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md new file mode 100644 index 00000000000..7ef6b2b32b2 --- /dev/null +++ b/docs/utilities/jmespath_functions.md @@ -0,0 +1,166 @@ +--- +title: JMESPath Functions +description: Utility +--- + +You might have events or responses that contain non-encoded JSON, where you need to decode so that you can access portions of the object or ensure the Powertools utility receives a JSON object. This is a common use case when using the [validation](/utilities/validation) or [idempotency](/utilities/idempotency) utilities. + +## Built-in JMESPath functions +You can use our built-in JMESPath functions within your expressions to do exactly that to decode JSON Strings, base64, and uncompress gzip data. + +!!! info + We use these for built-in envelopes to easily decode and unwrap events from sources like API Gateway, Kinesis, CloudWatch Logs, etc. + +#### powertools_json function + +Use `powertools_json` function to decode any JSON String anywhere a JMESPath expression is allowed. + +> **Validation scenario** + +This sample will decode the value within the `data` key into a valid JSON before we can validate it. + +=== "powertools_json_jmespath_function.py" + + ```python hl_lines="9" + from aws_lambda_powertools.utilities.validation import validate + + import schemas + + sample_event = { + 'data': '{"payload": {"message": "hello hello", "username": "blah blah"}}' + } + + validate(event=sample_event, schema=schemas.INPUT, envelope="powertools_json(data)") + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` + +> **Idempotency scenario** + +This sample will decode the value within the `body` key of an API Gateway event into a valid JSON object to ensure the Idempotency utility processes a JSON object instead of a string. 
+ +=== "powertools_json_jmespath_function.py" + + ```python hl_lines="8" + import json + from aws_lambda_powertools.utilities.idempotency import ( + IdempotencyConfig, DynamoDBPersistenceLayer, idempotent + ) + + persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") + @idempotent(config=config, persistence_store=persistence_layer) + def handler(event:APIGatewayProxyEvent, context): + body = json.loads(event['body']) + payment = create_subscription_payment( + user=body['user'], + product=body['product_id'] + ) + ... + return { + "payment_id": payment.id, + "message": "success", + "statusCode": 200 + } + ``` + +#### powertools_base64 function + +Use `powertools_base64` function to decode any base64 data. + +This sample will decode the base64 value within the `data` key, and decode the JSON string into a valid JSON before we can validate it. + +=== "powertools_json_jmespath_function.py" + + ```python hl_lines="12" + from aws_lambda_powertools.utilities.validation import validate + + import schemas + + sample_event = { + "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=" + } + + validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_json(powertools_base64(data))" + ) + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` + +#### powertools_base64_gzip function + +Use `powertools_base64_gzip` function to decompress and decode base64 data. + +This sample will decompress and decode base64 data, then use JMESPath pipeline expression to pass the result for decoding its JSON string. + +=== "powertools_json_jmespath_function.py" + + ```python hl_lines="12" + from aws_lambda_powertools.utilities.validation import validate + + import schemas + + sample_event = { + "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA==" + } + + validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_base64_gzip(data) | powertools_json(@)" + ) + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` + +### Bring your own JMESPath function + +!!! warning + This should only be used for advanced use cases where you have special formats not covered by the built-in functions. + + This will **replace all provided built-in functions such as `powertools_json`, so you will no longer be able to use them**. + +For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. 
+ +=== "custom_jmespath_function.py" + + ```python hl_lines="2 6-10 14" + from aws_lambda_powertools.utilities.validation import validator + from jmespath import functions + + import schemas + + class CustomFunctions(functions.Functions): + + @functions.signature({'types': ['string']}) + def _func_special_decoder(self, s): + return my_custom_decoder_logic(s) + + custom_jmespath_options = {"custom_functions": CustomFunctions()} + + @validator(schema=schemas.INPUT, jmespath_options=**custom_jmespath_options) + def handler(event, context): + return event + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index 7df339b7503..73f1e085164 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -429,129 +429,7 @@ For each format defined in a dictionary key, you must use a regex, or a function You might have events or responses that contain non-encoded JSON, where you need to decode before validating them. -You can use our built-in JMESPath functions within your expressions to do exactly that to decode JSON Strings, base64, and uncompress gzip data. +You can use our built-in [JMESPath functions](/utilities/jmespath_functions) within your expressions to do exactly that to decode JSON Strings, base64, and uncompress gzip data. !!! info We use these for built-in envelopes to easily to decode and unwrap events from sources like Kinesis, CloudWatch Logs, etc. - -#### powertools_json function - -Use `powertools_json` function to decode any JSON String. - -This sample will decode the value within the `data` key into a valid JSON before we can validate it. - -=== "powertools_json_jmespath_function.py" - - ```python hl_lines="9" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - 'data': '{"payload": {"message": "hello hello", "username": "blah blah"}}' - } - - validate(event=sample_event, schema=schemas.INPUT, envelope="powertools_json(data)") - ``` - -=== "schemas.py" - - ```python hl_lines="7 14 16 23 39 45 47 52" - --8<-- "docs/shared/validation_basic_jsonschema.py" - ``` - -#### powertools_base64 function - -Use `powertools_base64` function to decode any base64 data. - -This sample will decode the base64 value within the `data` key, and decode the JSON string into a valid JSON before we can validate it. - -=== "powertools_json_jmespath_function.py" - - ```python hl_lines="12" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=" - } - - validate( - event=sample_event, - schema=schemas.INPUT, - envelope="powertools_json(powertools_base64(data))" - ) - ``` - -=== "schemas.py" - - ```python hl_lines="7 14 16 23 39 45 47 52" - --8<-- "docs/shared/validation_basic_jsonschema.py" - ``` - -#### powertools_base64_gzip function - -Use `powertools_base64_gzip` function to decompress and decode base64 data. - -This sample will decompress and decode base64 data, then use JMESPath pipeline expression to pass the result for decoding its JSON string. 
- -=== "powertools_json_jmespath_function.py" - - ```python hl_lines="12" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA==" - } - - validate( - event=sample_event, - schema=schemas.INPUT, - envelope="powertools_base64_gzip(data) | powertools_json(@)" - ) - ``` - -=== "schemas.py" - - ```python hl_lines="7 14 16 23 39 45 47 52" - --8<-- "docs/shared/validation_basic_jsonschema.py" - ``` - -### Bring your own JMESPath function - -!!! warning - This should only be used for advanced use cases where you have special formats not covered by the built-in functions. - - This will **replace all provided built-in functions such as `powertools_json`, so you will no longer be able to use them**. - -For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. - -=== "custom_jmespath_function.py" - - ```python hl_lines="2 6-10 14" - from aws_lambda_powertools.utilities.validation import validator - from jmespath import functions - - import schemas - - class CustomFunctions(functions.Functions): - - @functions.signature({'types': ['string']}) - def _func_special_decoder(self, s): - return my_custom_decoder_logic(s) - - custom_jmespath_options = {"custom_functions": CustomFunctions()} - - @validator(schema=schemas.INPUT, jmespath_options=**custom_jmespath_options) - def handler(event, context): - return event - ``` - -=== "schemas.py" - - ```python hl_lines="7 14 16 23 39 45 47 52" - --8<-- "docs/shared/validation_basic_jsonschema.py" - ``` diff --git a/mkdocs.yml b/mkdocs.yml index 94dc9980cf1..b90ba4376de 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -26,6 +26,7 @@ nav: - utilities/parser.md - utilities/idempotency.md - utilities/feature_flags.md + - utilities/jmespath_functions.md theme: name: material From 4e3e00bb30ee2f9e4ebb11adf463f4e66b94dbf4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Sep 2021 11:48:58 +0200 Subject: [PATCH 17/40] chore(deps): bump codecov/codecov-action from 2.0.2 to 2.1.0 (#675) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 26fbaeb3c4e..6dc4446ee14 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -37,7 +37,7 @@ jobs: - name: Complexity baseline run: make complexity-baseline - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2.0.2 + uses: codecov/codecov-action@v2.1.0 with: file: ./coverage.xml # flags: unittests From 68e2c8ebdcf5c35d3deddb1b12672b594746a566 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Sep 2021 09:49:34 +0000 Subject: [PATCH 18/40] chore(deps): bump boto3 from 1.18.41 to 1.18.49 (#703) Bumps 
[boto3](https://github.com/boto/boto3) from 1.18.41 to 1.18.49.
Changelog

Sourced from boto3's changelog.

1.18.49

  • api-change:appintegrations: [botocore] The Amazon AppIntegrations service enables you to configure and reuse connections to external applications.
  • api-change:wisdom: [botocore] Released Amazon Connect Wisdom, a feature of Amazon Connect, which provides real-time recommendations and search functionality in general availability (GA). For more information, see https://docs.aws.amazon.com/wisdom/latest/APIReference/Welcome.html.
  • api-change:pinpoint: [botocore] Added support for journey with contact center activity
  • api-change:voice-id: [botocore] Released the Amazon Voice ID SDK, for usage with the Amazon Connect Voice ID feature released for Amazon Connect.
  • api-change:connect: [botocore] This release updates a set of APIs: CreateIntegrationAssociation, ListIntegrationAssociations, CreateUseCase, and StartOutboundVoiceContact. You can use it to create integrations with Amazon Pinpoint for the Amazon Connect Campaigns use case, Amazon Connect Voice ID, and Amazon Connect Wisdom.
  • api-change:elbv2: [botocore] Update elbv2 client to latest version

1.18.48

  • api-change:license-manager: [botocore] AWS License Manager now allows customers to get the LicenseArn in the Checkout API Response.
  • api-change:ec2: [botocore] DescribeInstances now returns Platform Details, Usage Operation, and Usage Operation Update Time.

1.18.47

  • api-change:mediaconvert: [botocore] This release adds style and positioning support for caption or subtitle burn-in from rich text sources such as TTML. This release also introduces configurable image-based trick play track generation.
  • api-change:appsync: [botocore] Documented the new OpenSearchServiceDataSourceConfig data type. Added deprecation notes to the ElasticsearchDataSourceConfig data type.
  • api-change:ssm: [botocore] Added cutoff behavior support for preventing new task invocations from starting when the maintenance window cutoff time is reached.

1.18.46

  • api-change:imagebuilder: [botocore] This feature adds support for specifying GP3 volume throughput and configuring instance metadata options for instances launched by EC2 Image Builder.
  • api-change:wafv2: [botocore] Added the regex match rule statement, for matching web requests against a single regular expression.
  • api-change:mediatailor: [botocore] This release adds support to configure logs for playback configuration.
  • api-change:lexv2-models: [botocore] Update lexv2-models client to latest version
  • api-change:iam: [botocore] Added changes to OIDC API about not using port numbers in the URL.
  • api-change:license-manager: [botocore] AWS License Manager now allows customers to change their Windows Server or SQL license types from Bring-Your-Own-License (BYOL) to License Included or vice-versa (using the customer's media).
  • api-change:mediapackage-vod: [botocore] MediaPackage VOD will now return the current processing statuses of an asset's endpoints. The status can be QUEUED, PROCESSING, PLAYABLE, or FAILED.

1.18.45

  • api-change:comprehend: [botocore] Amazon Comprehend now supports versioning of custom models, improved training with ONE_DOC_PER_FILE text documents for custom entity recognition, ability to provide specific test sets during training, and live migration to new model endpoints.
  • api-change:iot: [botocore] This release adds support for verifying, viewing and filtering AWS IoT Device Defender detect violations with four verification states.
  • api-change:ecr: [botocore] This release adds additional support for repository replication
  • api-change:ec2: [botocore] This update adds support for downloading configuration templates using new APIs (GetVpnConnectionDeviceTypes and GetVpnConnectionDeviceSampleConfiguration) and Internet Key Exchange version 2 (IKEv2) parameters for many popular CGW devices.

1.18.44

... (truncated)

Commits
  • 5ab9194 Merge branch 'release-1.18.49'
  • db3460e Bumping version to 1.18.49
  • 6a5bac3 Add changelog entries from botocore
  • 76a599b Add support for Python 3.10 (#3009)
  • fb82bde Merge branch 'release-1.18.48'
  • 4d4aa02 Merge branch 'release-1.18.48' into develop
  • 7f75005 Bumping version to 1.18.48
  • 8feb681 Add changelog entries from botocore
  • 485029e Declare support for Python 3.9 (#2942)
  • 09041c4 Convert functional tests to pytest
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.18.41&new-version=1.18.49)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
--- poetry.lock | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 22784e5754d..2205938537e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,14 +81,14 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.18.41" +version = "1.18.49" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.41,<1.22.0" +botocore = ">=1.21.49,<1.22.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -97,7 +97,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.41" +version = "1.21.49" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -361,7 +361,7 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["twine", "markdown", "flake8"] +dev = ["twine", "markdown", "flake8", "wheel"] [[package]] name = "gitdb" @@ -1074,12 +1074,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.18.41-py3-none-any.whl", hash = "sha256:aaa6ba286d92fb03f27dd619220c6c1de2c010f39cac7afa72f505f073a31db1"}, - {file = "boto3-1.18.41.tar.gz", hash = "sha256:44f73009506dba227e0d421e4fc44a863d8ff315aaa47d9a7be6c549a6a88a12"}, + {file = "boto3-1.18.49-py3-none-any.whl", hash = "sha256:1a2908d2829268f1b2355bad3a96bfdc8e41523629b5d958bcedfc35d2d232dd"}, + {file = "boto3-1.18.49.tar.gz", hash = "sha256:e520655c9caf2f21853644d88b59b1c32bc44ccd58b20574883b25eb6256d938"}, ] botocore = [ - {file = "botocore-1.21.41-py3-none-any.whl", hash = "sha256:efad68a52ee2d939618e0fcb3da0a46dff10cb2e0e128c1e2749bbfc58953a12"}, - {file = "botocore-1.21.41.tar.gz", hash = "sha256:b877f9175843939db6fde3864ffc47611863710b85dc0336bb2433e921dc8790"}, + {file = "botocore-1.21.49-py3-none-any.whl", hash = "sha256:eab89183f7d94cabacde79a266060bb9429249e33a39b7ba4c1b15c965095477"}, + {file = "botocore-1.21.49.tar.gz", hash = "sha256:0161c3b64e34315928aae7fdbce49e684c9c2cfad2435cb22023b7ad87306f12"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, @@ -1214,6 +1214,7 @@ future = [ ] ghp-import = [ {file = "ghp-import-2.0.1.tar.gz", hash = "sha256:753de2eace6e0f7d4edfb3cce5e3c3b98cd52aadb80163303d1d036bda7b4483"}, + {file = "ghp_import-2.0.1-py3-none-any.whl", hash = "sha256:8241a8e9f8dd3c1fafe9696e6e081b57a208ef907e9939c44e7415e407ab40ea"}, ] gitdb = [ {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, From 808ac71f1cd03494df373c7a906f94bf35a66dbf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Oct 2021 11:11:20 +0000 Subject: [PATCH 19/40] chore(deps): bump boto3 from 1.18.49 to 1.18.51 (#713) Bumps [boto3](https://github.com/boto/boto3) from 1.18.49 to 1.18.51.
Changelog

Sourced from boto3's changelog.

1.18.51

  • api-change:lambda: [botocore] Adds support for Lambda functions powered by AWS Graviton2 processors. Customers can now select the CPU architecture for their functions.
  • api-change:sesv2: [botocore] This release includes the ability to use 2048 bits RSA key pairs for DKIM in SES, either with Easy DKIM or Bring Your Own DKIM.
  • api-change:amp: [botocore] This release adds alert manager and rule group namespace APIs

1.18.50

  • api-change:transfer: [botocore] Added changes for managed workflows feature APIs.
  • api-change:imagebuilder: [botocore] Fix description for AmiDistributionConfiguration Name property, which actually refers to the output AMI name. Also updated for consistent terminology to use "base" image, and another update to fix description text.
Commits
  • dee188a Merge branch 'release-1.18.51'
  • f8c782a Bumping version to 1.18.51
  • da2cc18 Add changelog entries from botocore
  • 3518cfb Merge branch 'release-1.18.50'
  • 3d202fc Merge branch 'release-1.18.50' into develop
  • ac5e53c Bumping version to 1.18.50
  • dc280fd Add changelog entries from botocore
  • f009e3b Merge branch 'release-1.18.49' into develop
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=boto3&package-manager=pip&previous-version=1.18.49&new-version=1.18.51)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
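Among the API changes quoted above, the `lambda` entry in 1.18.51 is the one most relevant here: functions can now declare a CPU architecture. A minimal sketch of opting into Graviton2 (arm64) at creation time with boto3 follows; the function name, role ARN, and zip path are placeholders rather than values from this repository.

```python
import boto3

lambda_client = boto3.client("lambda")

with open("function.zip", "rb") as package:  # placeholder deployment package
    lambda_client.create_function(
        FunctionName="powertools-arm64-example",                     # placeholder
        Runtime="python3.9",
        Role="arn:aws:iam::123456789012:role/example-lambda-role",   # placeholder
        Handler="app.handler",
        Code={"ZipFile": package.read()},
        Architectures=["arm64"],  # new with botocore 1.21.51: select Graviton2
    )
```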
--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2205938537e..ae56006b9c2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,14 +81,14 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.18.49" +version = "1.18.51" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.49,<1.22.0" +botocore = ">=1.21.51,<1.22.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -97,7 +97,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.49" +version = "1.21.51" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1074,12 +1074,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.18.49-py3-none-any.whl", hash = "sha256:1a2908d2829268f1b2355bad3a96bfdc8e41523629b5d958bcedfc35d2d232dd"}, - {file = "boto3-1.18.49.tar.gz", hash = "sha256:e520655c9caf2f21853644d88b59b1c32bc44ccd58b20574883b25eb6256d938"}, + {file = "boto3-1.18.51-py3-none-any.whl", hash = "sha256:83d6f539e0f0e0f0c532bb2b11d1e9c5055d1d806d64a61aff4f49399c294ee7"}, + {file = "boto3-1.18.51.tar.gz", hash = "sha256:98279095b1d08ee6d8d587f2c66fda6d560ad3046e98cd140c1aa8e1ed018c70"}, ] botocore = [ - {file = "botocore-1.21.49-py3-none-any.whl", hash = "sha256:eab89183f7d94cabacde79a266060bb9429249e33a39b7ba4c1b15c965095477"}, - {file = "botocore-1.21.49.tar.gz", hash = "sha256:0161c3b64e34315928aae7fdbce49e684c9c2cfad2435cb22023b7ad87306f12"}, + {file = "botocore-1.21.51-py3-none-any.whl", hash = "sha256:2089f9fa36a59d8c02435c49d58ccc7b3ceb9c0c054ea4f71631c3c3a1c5245e"}, + {file = "botocore-1.21.51.tar.gz", hash = "sha256:17a10dd33334e7e3aaa4e12f66317284f96bb53267e20bc877a187c442681772"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From dbe45da073ae2721ee1ae1c9f62db831121dc60d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Oct 2021 11:11:25 +0000 Subject: [PATCH 20/40] chore(deps-dev): bump flake8-bugbear from 21.9.1 to 21.9.2 (#712) Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 21.9.1 to 21.9.2.
Release notes

Sourced from flake8-bugbear's releases.

21.9.2

  • Fix crash on call in except statement in _to_name_str (#187)
  • Update B006: list, dictionary, and set comprehensions are now also disallowed (#186)
Commits
  • ace91cc Update setup.py + Change Log for 21.9.2 release
  • fdfa3a0 Do not crash on call in except statement (#187)
  • d4e1350 EHN: also raise B006 for list/dict/set comprehensions (#186)
  • 931d95a Remove executable flags (#188)
  • 8210776 Fix string formatting to make black --experimental-string-processing happy
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=flake8-bugbear&package-manager=pip&previous-version=21.9.1&new-version=21.9.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
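For context on the B006 update quoted above: mutable defaults built with list, dict, or set comprehensions are now reported as well. A small illustrative snippet (not from this repository) shows what 21.9.2 flags and the usual fix.

```python
# Flagged by flake8-bugbear 21.9.2 (B006): the comprehension builds a mutable
# default that is shared across calls
def cache_results(key, cache={k: None for k in ("a", "b")}):
    return cache.get(key)


# Preferred pattern: default to None and build the mutable value per call
def cache_results_fixed(key, cache=None):
    if cache is None:
        cache = {k: None for k in ("a", "b")}
    return cache.get(key)
```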
--- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ae56006b9c2..374ef8ad735 100644 --- a/poetry.lock +++ b/poetry.lock @@ -244,7 +244,7 @@ toml = "*" [[package]] name = "flake8-bugbear" -version = "21.9.1" +version = "21.9.2" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -1047,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "ddec4b961b63336adb798a590c98df41a874da5ee4b0e37993c3c3a0437dc839" +content-hash = "c55d59c37d135eeba1ae059cbfe702c6167dd68645d1001bc90482dddfde8ab4" [metadata.files] appdirs = [ @@ -1179,8 +1179,8 @@ flake8-black = [ {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-21.9.1.tar.gz", hash = "sha256:2f60c8ce0dc53d51da119faab2d67dea978227f0f92ed3c44eb7d65fb2e06a96"}, - {file = "flake8_bugbear-21.9.1-py36.py37.py38-none-any.whl", hash = "sha256:45bfdccfb9f2d8aa140e33cac8f46f1e38215c13d5aa8650e7e188d84e2f94c6"}, + {file = "flake8-bugbear-21.9.2.tar.gz", hash = "sha256:db9a09893a6c649a197f5350755100bb1dd84f110e60cf532fdfa07e41808ab2"}, + {file = "flake8_bugbear-21.9.2-py36.py37.py38-none-any.whl", hash = "sha256:4f7eaa6f05b7d7ea4cbbde93f7bcdc5438e79320fa1ec420d860c181af38b769"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, diff --git a/pyproject.toml b/pyproject.toml index c5197c626ab..2f2c2549d9e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ bandit = "^1.7.0" radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" -flake8-bugbear = "^21.9.1" +flake8-bugbear = "^21.9.2" mkdocs-material = "^7.3.0" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" From 68c810e6feb7a02cd16629414c8dd7fa808983de Mon Sep 17 00:00:00 2001 From: Dani Comnea Date: Fri, 1 Oct 2021 12:36:55 +0100 Subject: [PATCH 21/40] docs: Terraform reference for SAR Lambda Layer (#716) Co-authored-by: heitorlessa --- docs/index.md | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/docs/index.md b/docs/index.md index 781a96e2eb3..b79a2e7074f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -124,6 +124,54 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, ) ``` +=== "Terraform" + + > Credits to [Dani Comnea](https://github.com/DanyC97) for providing the Terraform equivalent. 
+ + ```terraform hl_lines="12-13 15-20 23-25 40" + terraform { + required_version = "~> 0.13" + required_providers { + aws = "~> 3.50.0" + } + } + + provider "aws" { + region = "us-east-1" + } + + resource "aws_serverlessapplicationrepository_cloudformation_stack" "deploy_sar_stack" { + name = "aws-lambda-powertools-python-layer" + + application_id = data.aws_serverlessapplicationrepository_application.sar_app.application_id + semantic_version = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version + capabilities = [ + "CAPABILITY_IAM", + "CAPABILITY_NAMED_IAM" + ] + } + + data "aws_serverlessapplicationrepository_application" "sar_app" { + application_id = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer" + semantic_version = var.aws_powertools_version + } + + variable "aws_powertools_version" { + type = string + default = "1.20.2" + description = "The AWS Powertools release version" + } + + output "deployed_powertools_sar_version" { + value = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version + } + + # Fetch Lambda Powertools Layer ARN from deployed SAR App + output "aws_lambda_powertools_layer_arn" { + value = aws_serverlessapplicationrepository_cloudformation_stack.deploy_sar_stack.outputs.LayerVersionArn + } + ``` + ??? tip "Example of least-privileged IAM permissions to deploy Layer" > Credits to [mwarkentin](https://github.com/mwarkentin) for providing the scoped down IAM permissions. From 9432a538e5da98c36b7cca04022ee5df18f1874e Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 1 Oct 2021 15:16:26 +0200 Subject: [PATCH 22/40] fix(mypy): a few return types, type signatures, and untyped areas (#718) --- aws_lambda_powertools/logging/formatter.py | 8 ++--- aws_lambda_powertools/logging/logger.py | 6 ++-- aws_lambda_powertools/metrics/base.py | 6 ++-- aws_lambda_powertools/metrics/metric.py | 2 +- aws_lambda_powertools/metrics/metrics.py | 30 +++++++++++-------- .../middleware_factory/factory.py | 2 +- .../shared/jmespath_utils.py | 9 +++--- aws_lambda_powertools/tracing/tracer.py | 2 +- .../utilities/data_classes/sqs_event.py | 4 +-- .../idempotency/persistence/dynamodb.py | 2 +- .../utilities/validation/base.py | 4 +-- mypy.ini | 6 ++++ 12 files changed, 47 insertions(+), 34 deletions(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index de9254a3371..e35c9a7a327 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -58,7 +58,7 @@ class LambdaPowertoolsFormatter(BasePowertoolsFormatter): def __init__( self, json_serializer: Optional[Callable[[Dict], str]] = None, - json_deserializer: Optional[Callable[[Dict], str]] = None, + json_deserializer: Optional[Callable[[Union[Dict, str, bool, int, float]], str]] = None, json_default: Optional[Callable[[Any], Any]] = None, datefmt: Optional[str] = None, log_record_order: Optional[List[str]] = None, @@ -106,7 +106,7 @@ def __init__( self.update_formatter = self.append_keys # alias to old method if self.utc: - self.converter = time.gmtime + self.converter = time.gmtime # type: ignore super(LambdaPowertoolsFormatter, self).__init__(datefmt=self.datefmt) @@ -128,7 +128,7 @@ def format(self, record: logging.LogRecord) -> str: # noqa: A003 return self.serialize(log=formatted_log) def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -> str: - record_ts = self.converter(record.created) + record_ts = 
self.converter(record.created) # type: ignore if datefmt: return time.strftime(datefmt, record_ts) @@ -201,7 +201,7 @@ def _extract_log_exception(self, log_record: logging.LogRecord) -> Union[Tuple[s Log record with constant traceback info and exception name """ if log_record.exc_info: - return self.formatException(log_record.exc_info), log_record.exc_info[0].__name__ + return self.formatException(log_record.exc_info), log_record.exc_info[0].__name__ # type: ignore return None, None diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 35054f86137..e8b67a2ca7e 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -361,7 +361,7 @@ def registered_handler(self) -> logging.Handler: return handlers[0] @property - def registered_formatter(self) -> Optional[PowertoolsFormatter]: + def registered_formatter(self) -> PowertoolsFormatter: """Convenience property to access logger formatter""" return self.registered_handler.formatter # type: ignore @@ -405,7 +405,9 @@ def get_correlation_id(self) -> Optional[str]: str, optional Value for the correlation id """ - return self.registered_formatter.log_format.get("correlation_id") + if isinstance(self.registered_formatter, LambdaPowertoolsFormatter): + return self.registered_formatter.log_format.get("correlation_id") + return None @staticmethod def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index 853f06f210b..25e502d0887 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -90,7 +90,7 @@ def __init__( self._metric_unit_options = list(MetricUnit.__members__) self.metadata_set = metadata_set if metadata_set is not None else {} - def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): + def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None: """Adds given metric Example @@ -215,7 +215,7 @@ def serialize_metric_set( **metric_names_and_values, # "single_metric": 1.0 } - def add_dimension(self, name: str, value: str): + def add_dimension(self, name: str, value: str) -> None: """Adds given dimension to all metrics Example @@ -241,7 +241,7 @@ def add_dimension(self, name: str, value: str): # checking before casting improves performance in most cases self.dimension_set[name] = value if isinstance(value, str) else str(value) - def add_metadata(self, key: str, value: Any): + def add_metadata(self, key: str, value: Any) -> None: """Adds high cardinal metadata for metrics object This will not be available during metrics visualization. 
diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index 1ac2bd9450e..a30f428e38e 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -42,7 +42,7 @@ class SingleMetric(MetricManager): Inherits from `aws_lambda_powertools.metrics.base.MetricManager` """ - def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): + def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None: """Method to prevent more than one metric being created Parameters diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index fafc604b505..23e9f542eea 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -2,8 +2,9 @@ import json import logging import warnings -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict, Optional, Union, cast +from ..shared.types import AnyCallableT from .base import MetricManager, MetricUnit from .metric import single_metric @@ -87,7 +88,7 @@ def __init__(self, service: Optional[str] = None, namespace: Optional[str] = Non service=self.service, ) - def set_default_dimensions(self, **dimensions): + def set_default_dimensions(self, **dimensions) -> None: """Persist dimensions across Lambda invocations Parameters @@ -113,10 +114,10 @@ def lambda_handler(): self.default_dimensions.update(**dimensions) - def clear_default_dimensions(self): + def clear_default_dimensions(self) -> None: self.default_dimensions.clear() - def clear_metrics(self): + def clear_metrics(self) -> None: logger.debug("Clearing out existing metric set from memory") self.metric_set.clear() self.dimension_set.clear() @@ -125,11 +126,11 @@ def clear_metrics(self): def log_metrics( self, - lambda_handler: Optional[Callable[[Any, Any], Any]] = None, + lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None, capture_cold_start_metric: bool = False, raise_on_empty_metrics: bool = False, default_dimensions: Optional[Dict[str, str]] = None, - ): + ) -> AnyCallableT: """Decorator to serialize and publish metrics at the end of a function execution. Be aware that the log_metrics **does call* the decorated function (e.g. lambda_handler). 
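Both decorator forms remain supported, which is why the code path below returns a `functools.partial` (now wrapped in `cast`) when it is called with keyword arguments only; the handler names and bodies here are illustrative:

```python
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp", service="booking")

# bare form: the handler itself is passed in as `lambda_handler`
@metrics.log_metrics
def handler(event, context):
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    return {"statusCode": 200}

# parameterized form: log_metrics is first invoked with kwargs only and returns a partial
@metrics.log_metrics(capture_cold_start_metric=True)
def other_handler(event, context):
    return {"statusCode": 200}
```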
@@ -169,11 +170,14 @@ def handler(event, context): # Return a partial function with args filled if lambda_handler is None: logger.debug("Decorator called with parameters") - return functools.partial( - self.log_metrics, - capture_cold_start_metric=capture_cold_start_metric, - raise_on_empty_metrics=raise_on_empty_metrics, - default_dimensions=default_dimensions, + return cast( + AnyCallableT, + functools.partial( + self.log_metrics, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + default_dimensions=default_dimensions, + ), ) @functools.wraps(lambda_handler) @@ -194,9 +198,9 @@ def decorate(event, context): return response - return decorate + return cast(AnyCallableT, decorate) - def __add_cold_start_metric(self, context: Any): + def __add_cold_start_metric(self, context: Any) -> None: """Add cold start metric and function_name dimension Parameters diff --git a/aws_lambda_powertools/middleware_factory/factory.py b/aws_lambda_powertools/middleware_factory/factory.py index 74858bf6709..8ab16c5e8b7 100644 --- a/aws_lambda_powertools/middleware_factory/factory.py +++ b/aws_lambda_powertools/middleware_factory/factory.py @@ -118,7 +118,7 @@ def final_decorator(func: Optional[Callable] = None, **kwargs): if not inspect.isfunction(func): # @custom_middleware(True) vs @custom_middleware(log_event=True) raise MiddlewareInvalidArgumentError( - f"Only keyword arguments is supported for middlewares: {decorator.__qualname__} received {func}" + f"Only keyword arguments is supported for middlewares: {decorator.__qualname__} received {func}" # type: ignore # noqa: E501 ) @functools.wraps(func) diff --git a/aws_lambda_powertools/shared/jmespath_utils.py b/aws_lambda_powertools/shared/jmespath_utils.py index 9cc736aedfb..bbb3b38fe04 100644 --- a/aws_lambda_powertools/shared/jmespath_utils.py +++ b/aws_lambda_powertools/shared/jmespath_utils.py @@ -6,22 +6,23 @@ import jmespath from jmespath.exceptions import LexerError +from jmespath.functions import Functions, signature from aws_lambda_powertools.exceptions import InvalidEnvelopeExpressionError logger = logging.getLogger(__name__) -class PowertoolsFunctions(jmespath.functions.Functions): - @jmespath.functions.signature({"types": ["string"]}) +class PowertoolsFunctions(Functions): + @signature({"types": ["string"]}) def _func_powertools_json(self, value): return json.loads(value) - @jmespath.functions.signature({"types": ["string"]}) + @signature({"types": ["string"]}) def _func_powertools_base64(self, value): return base64.b64decode(value).decode() - @jmespath.functions.signature({"types": ["string"]}) + @signature({"types": ["string"]}) def _func_powertools_base64_gzip(self, value): encoded = base64.b64decode(value) uncompressed = gzip.decompress(encoded) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index dc010a3712f..2626793304c 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -17,7 +17,7 @@ logger = logging.getLogger(__name__) aws_xray_sdk = LazyLoader(constants.XRAY_SDK_MODULE, globals(), constants.XRAY_SDK_MODULE) -aws_xray_sdk.core = LazyLoader(constants.XRAY_SDK_CORE_MODULE, globals(), constants.XRAY_SDK_CORE_MODULE) +aws_xray_sdk.core = LazyLoader(constants.XRAY_SDK_CORE_MODULE, globals(), constants.XRAY_SDK_CORE_MODULE) # type: ignore # noqa: E501 class Tracer: diff --git a/aws_lambda_powertools/utilities/data_classes/sqs_event.py b/aws_lambda_powertools/utilities/data_classes/sqs_event.py 
index 0e70684cc3f..57caeea4cc2 100644 --- a/aws_lambda_powertools/utilities/data_classes/sqs_event.py +++ b/aws_lambda_powertools/utilities/data_classes/sqs_event.py @@ -75,9 +75,9 @@ def data_type(self) -> str: class SQSMessageAttributes(Dict[str, SQSMessageAttribute]): - def __getitem__(self, key: str) -> Optional[SQSMessageAttribute]: + def __getitem__(self, key: str) -> Optional[SQSMessageAttribute]: # type: ignore item = super(SQSMessageAttributes, self).get(key) - return None if item is None else SQSMessageAttribute(item) + return None if item is None else SQSMessageAttribute(item) # type: ignore class SQSRecord(DictWrapper): diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index 73f241bd613..c1ed29c6fd3 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -155,7 +155,7 @@ def _update_record(self, data_record: DataRecord): "ExpressionAttributeNames": expression_attr_names, } - self.table.update_item(**kwargs) # type: ignore + self.table.update_item(**kwargs) def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py index 2a337b85971..61d692d7f28 100644 --- a/aws_lambda_powertools/utilities/validation/base.py +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -33,10 +33,10 @@ def validate_data_against_schema(data: Union[Dict, str], schema: Dict, formats: except (TypeError, AttributeError, fastjsonschema.JsonSchemaDefinitionException) as e: raise InvalidSchemaFormatError(f"Schema received: {schema}, Formats: {formats}. Error: {e}") except fastjsonschema.JsonSchemaValueException as e: - message = f"Failed schema validation. Error: {e.message}, Path: {e.path}, Data: {e.value}" + message = f"Failed schema validation. 
Error: {e.message}, Path: {e.path}, Data: {e.value}" # noqa: B306 raise SchemaValidationError( message, - validation_message=e.message, + validation_message=e.message, # noqa: B306 name=e.name, path=e.path, value=e.value, diff --git a/mypy.ini b/mypy.ini index 2436d7074d2..faf6014a54d 100644 --- a/mypy.ini +++ b/mypy.ini @@ -11,6 +11,12 @@ show_error_context = True [mypy-jmespath] ignore_missing_imports=True +[mypy-jmespath.exceptions] +ignore_missing_imports=True + +[mypy-jmespath.functions] +ignore_missing_imports=True + [mypy-boto3] ignore_missing_imports = True From 3d4305b91d05112ac25aae0947bcc764c191e503 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 1 Oct 2021 16:09:30 +0200 Subject: [PATCH 23/40] feat: add get_raw_configuration property in store; expose store --- .../utilities/feature_flags/appconfig.py | 48 +++++++++++-------- .../utilities/feature_flags/base.py | 13 ++++- .../utilities/feature_flags/feature_flags.py | 10 ++-- .../feature_flags/test_feature_flags.py | 12 +++++ 4 files changed, 56 insertions(+), 27 deletions(-) diff --git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py index 2e0edc3b9b1..df3f83c47aa 100644 --- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py +++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py @@ -55,9 +55,31 @@ def __init__( self.jmespath_options = jmespath_options self._conf_store = AppConfigProvider(environment=environment, application=application, config=sdk_config) + @property + def get_raw_configuration(self) -> Dict[str, Any]: + """Fetch feature schema configuration from AWS AppConfig""" + try: + # parse result conf as JSON, keep in cache for self.max_age seconds + return cast( + dict, + self._conf_store.get( + name=self.name, + transform=TRANSFORM_TYPE, + max_age=self.cache_seconds, + ), + ) + except (GetParameterError, TransformParameterError) as exc: + err_msg = traceback.format_exc() + if "AccessDenied" in err_msg: + raise StoreClientError(err_msg) from exc + raise ConfigurationStoreError("Unable to get AWS AppConfig configuration file") from exc + def get_configuration(self) -> Dict[str, Any]: """Fetch feature schema configuration from AWS AppConfig + If envelope is set, it'll extract and return feature flags from configuration, + otherwise it'll return the entire configuration fetched from AWS AppConfig. 
+ Raises ------ ConfigurationStoreError @@ -68,25 +90,11 @@ def get_configuration(self) -> Dict[str, Any]: Dict[str, Any] parsed JSON dictionary """ - try: - # parse result conf as JSON, keep in cache for self.max_age seconds - config = cast( - dict, - self._conf_store.get( - name=self.name, - transform=TRANSFORM_TYPE, - max_age=self.cache_seconds, - ), - ) + config = self.get_raw_configuration - if self.envelope: - config = jmespath_utils.extract_data_from_envelope( - data=config, envelope=self.envelope, jmespath_options=self.jmespath_options - ) + if self.envelope: + config = jmespath_utils.extract_data_from_envelope( + data=config, envelope=self.envelope, jmespath_options=self.jmespath_options + ) - return config - except (GetParameterError, TransformParameterError) as exc: - err_msg = traceback.format_exc() - if "AccessDenied" in err_msg: - raise StoreClientError(err_msg) from exc - raise ConfigurationStoreError("Unable to get AWS AppConfig configuration file") from exc + return config diff --git a/aws_lambda_powertools/utilities/feature_flags/base.py b/aws_lambda_powertools/utilities/feature_flags/base.py index edb94c4f45d..e323f32d8b1 100644 --- a/aws_lambda_powertools/utilities/feature_flags/base.py +++ b/aws_lambda_powertools/utilities/feature_flags/base.py @@ -3,10 +3,19 @@ class StoreProvider(ABC): + @property + @abstractmethod + def get_raw_configuration(self) -> Dict[str, Any]: + """Get configuration from any store and return the parsed JSON dictionary""" + raise NotImplementedError() # pragma: no cover + @abstractmethod def get_configuration(self) -> Dict[str, Any]: """Get configuration from any store and return the parsed JSON dictionary + If envelope is set, it'll extract and return feature flags from configuration, + otherwise it'll return the entire configuration fetched from the store. + Raises ------ ConfigurationStoreError @@ -42,10 +51,10 @@ def get_configuration(self) -> Dict[str, Any]: } ``` """ - return NotImplemented # pragma: no cover + raise NotImplementedError() # pragma: no cover class BaseValidator(ABC): @abstractmethod def validate(self): - return NotImplemented # pragma: no cover + raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index d04e74ff293..d26144a262a 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Dict, List, Optional, cast from . import schema from .base import StoreProvider @@ -36,7 +36,7 @@ def __init__(self, store: StoreProvider): store: StoreProvider Store to use to fetch feature flag schema configuration. """ - self._store = store + self.store = store @staticmethod def _match_by_action(action: str, condition_value: Any, context_value: Any) -> bool: @@ -103,7 +103,7 @@ def _evaluate_rules( return feat_default return False - def get_configuration(self) -> Union[Dict[str, Dict], Dict]: + def get_configuration(self) -> Dict: """Get validated feature flag schema from configured store. Largely used to aid testing, since it's called by `evaluate` and `get_enabled_features` methods. 
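Taken together, the new property and the exposed `store` attribute let callers inspect the configuration exactly as fetched, while `get_configuration` still applies the optional JMESPath envelope and schema validation; a minimal sketch, with illustrative environment, application, name, and envelope values:

```python
from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags

app_config = AppConfigStore(
    environment="dev",
    application="product-catalogue",
    name="configuration",
    envelope="feature_flags",
)
feature_flags = FeatureFlags(store=app_config)

features_only = feature_flags.get_configuration()           # envelope applied, schema validated
whole_document = feature_flags.store.get_raw_configuration  # entire AppConfig payload, as fetched
```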
@@ -146,8 +146,8 @@ def get_configuration(self) -> Union[Dict[str, Dict], Dict]: ``` """ # parse result conf as JSON, keep in cache for max age defined in store - logger.debug(f"Fetching schema from registered store, store={self._store}") - config = self._store.get_configuration() + logger.debug(f"Fetching schema from registered store, store={self.store}") + config: Dict = self.store.get_configuration() validator = schema.SchemaValidator(schema=config) validator.validate() diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index 5342105da3d..8b6698a8179 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -587,3 +587,15 @@ def test_get_feature_toggle_propagates_access_denied_error(mocker, config): # THEN raise StoreClientError error with pytest.raises(StoreClientError, match="AccessDeniedException") as err: feature_flags.evaluate(name="Foo", default=False) + + +def test_get_configuration_with_envelope_and_raw(mocker, config): + expected_value = True + mocked_app_config_schema = {"log_level": "INFO", "features": {"my_feature": {"default": expected_value}}} + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config, envelope="features") + + features_config = feature_flags.get_configuration() + config = feature_flags.store.get_raw_configuration + + assert "log_level" in config + assert "log_level" not in features_config From c21ba45e4320d64a1dd6abe81c41fe53faa7fb60 Mon Sep 17 00:00:00 2001 From: Tom McCarthy Date: Fri, 1 Oct 2021 16:29:35 +0200 Subject: [PATCH 24/40] feat: boto3 sessions in batch, parameters & idempotency (#717) --- aws_lambda_powertools/utilities/batch/sqs.py | 19 ++++++- .../utilities/parameters/appconfig.py | 13 ++++- .../utilities/parameters/dynamodb.py | 7 ++- .../utilities/parameters/secrets.py | 8 ++- .../utilities/parameters/ssm.py | 7 ++- docs/utilities/batch.md | 56 ++++++++++++++++++- docs/utilities/idempotency.md | 2 +- docs/utilities/parameters.md | 37 ++++++++++++ 8 files changed, 134 insertions(+), 15 deletions(-) diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py index e37fdbd3fb5..38773a399dd 100644 --- a/aws_lambda_powertools/utilities/batch/sqs.py +++ b/aws_lambda_powertools/utilities/batch/sqs.py @@ -31,6 +31,8 @@ class PartialSQSProcessor(BasePartialProcessor): botocore config object suppress_exception: bool, optional Supress exception raised if any messages fail processing, by default False + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Example @@ -56,12 +58,18 @@ class PartialSQSProcessor(BasePartialProcessor): """ - def __init__(self, config: Optional[Config] = None, suppress_exception: bool = False): + def __init__( + self, + config: Optional[Config] = None, + suppress_exception: bool = False, + boto3_session: Optional[boto3.session.Session] = None, + ): """ Initializes sqs client. 
""" config = config or Config() - self.client = boto3.client("sqs", config=config) + session = boto3_session or boto3.session.Session() + self.client = session.client("sqs", config=config) self.suppress_exception = suppress_exception super().__init__() @@ -142,6 +150,7 @@ def sqs_batch_processor( record_handler: Callable, config: Optional[Config] = None, suppress_exception: bool = False, + boto3_session: Optional[boto3.session.Session] = None, ): """ Middleware to handle SQS batch event processing @@ -160,6 +169,8 @@ def sqs_batch_processor( botocore config object suppress_exception: bool, optional Supress exception raised if any messages fail processing, by default False + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Examples -------- @@ -180,7 +191,9 @@ def sqs_batch_processor( """ config = config or Config() - processor = PartialSQSProcessor(config=config, suppress_exception=suppress_exception) + session = boto3_session or boto3.session.Session() + + processor = PartialSQSProcessor(config=config, suppress_exception=suppress_exception, boto3_session=session) records = event["Records"] diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py index 4a400aa7789..d1613c14513 100644 --- a/aws_lambda_powertools/utilities/parameters/appconfig.py +++ b/aws_lambda_powertools/utilities/parameters/appconfig.py @@ -29,6 +29,8 @@ class AppConfigProvider(BaseProvider): Application of the configuration to pass during client initialization config: botocore.config.Config, optional Botocore configuration to pass during client initialization + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Example ------- @@ -60,13 +62,20 @@ class AppConfigProvider(BaseProvider): client: Any = None - def __init__(self, environment: str, application: Optional[str] = None, config: Optional[Config] = None): + def __init__( + self, + environment: str, + application: Optional[str] = None, + config: Optional[Config] = None, + boto3_session: Optional[boto3.session.Session] = None, + ): """ Initialize the App Config client """ config = config or Config() - self.client = boto3.client("appconfig", config=config) + session = boto3_session or boto3.session.Session() + self.client = session.client("appconfig", config=config) self.application = resolve_env_var_choice( choice=application, env=os.getenv(constants.SERVICE_NAME_ENV, "service_undefined") ) diff --git a/aws_lambda_powertools/utilities/parameters/dynamodb.py b/aws_lambda_powertools/utilities/parameters/dynamodb.py index 39bd1a8d6b7..9220edf3b05 100644 --- a/aws_lambda_powertools/utilities/parameters/dynamodb.py +++ b/aws_lambda_powertools/utilities/parameters/dynamodb.py @@ -30,6 +30,8 @@ class DynamoDBProvider(BaseProvider): Complete url to reference local DynamoDB instance, e.g. 
http://localhost:8080 config: botocore.config.Config, optional Botocore configuration to pass during client initialization + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Example ------- @@ -149,13 +151,16 @@ def __init__( value_attr: str = "value", endpoint_url: Optional[str] = None, config: Optional[Config] = None, + boto3_session: Optional[boto3.session.Session] = None, ): """ Initialize the DynamoDB client """ config = config or Config() - self.table = boto3.resource("dynamodb", endpoint_url=endpoint_url, config=config).Table(table_name) + session = boto3_session or boto3.session.Session() + + self.table = session.resource("dynamodb", endpoint_url=endpoint_url, config=config).Table(table_name) self.key_attr = key_attr self.sort_attr = sort_attr diff --git a/aws_lambda_powertools/utilities/parameters/secrets.py b/aws_lambda_powertools/utilities/parameters/secrets.py index 5699876d90e..b64e70ae184 100644 --- a/aws_lambda_powertools/utilities/parameters/secrets.py +++ b/aws_lambda_powertools/utilities/parameters/secrets.py @@ -19,6 +19,8 @@ class SecretsProvider(BaseProvider): ---------- config: botocore.config.Config, optional Botocore configuration to pass during client initialization + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Example ------- @@ -58,14 +60,14 @@ class SecretsProvider(BaseProvider): client: Any = None - def __init__(self, config: Optional[Config] = None): + def __init__(self, config: Optional[Config] = None, boto3_session: Optional[boto3.session.Session] = None): """ Initialize the Secrets Manager client """ config = config or Config() - - self.client = boto3.client("secretsmanager", config=config) + session = boto3_session or boto3.session.Session() + self.client = session.client("secretsmanager", config=config) super().__init__() diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py index 2a16ad91f08..4cbb16354c7 100644 --- a/aws_lambda_powertools/utilities/parameters/ssm.py +++ b/aws_lambda_powertools/utilities/parameters/ssm.py @@ -19,6 +19,8 @@ class SSMProvider(BaseProvider): ---------- config: botocore.config.Config, optional Botocore configuration to pass during client initialization + boto3_session : boto3.session.Session, optional + Boto3 session to use for AWS API communication Example ------- @@ -74,13 +76,14 @@ class SSMProvider(BaseProvider): client: Any = None - def __init__(self, config: Optional[Config] = None): + def __init__(self, config: Optional[Config] = None, boto3_session: Optional[boto3.session.Session] = None): """ Initialize the SSM Parameter Store client """ config = config or Config() - self.client = boto3.client("ssm", config=config) + session = boto3_session or boto3.session.Session() + self.client = session.client("ssm", config=config) super().__init__() diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index 96770fb1849..56ab160e9f9 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -143,10 +143,13 @@ Use `PartialSQSProcessor` context manager to access a list of all return values return result ``` -### Passing custom boto3 config +### Customizing boto configuration -If you need to pass custom configuration such as region to the SDK, you can pass your own [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) to -the `sqs_batch_processor` decorator: +The **`config`** and **`boto3_session`** 
parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) +or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when using the `sqs_batch_processor` +decorator or `PartialSQSProcessor` class. + +> Custom config example === "Decorator" @@ -193,6 +196,53 @@ the `sqs_batch_processor` decorator: return result ``` +> Custom boto3 session example + +=== "Decorator" + + ```python hl_lines="4 12" + from aws_lambda_powertools.utilities.batch import sqs_batch_processor + from botocore.config import Config + + session = boto3.session.Session() + + def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + @sqs_batch_processor(record_handler=record_handler, boto3_session=session) + def lambda_handler(event, context): + return {"statusCode": 200} + ``` + +=== "Context manager" + + ```python hl_lines="4 16" + from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + import boto3 + + session = boto3.session.Session() + + def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + + def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(boto3_session=session) + + with processor(records, record_handler): + result = processor.process() + + return result + ``` + ### Suppressing exceptions If you want to disable the default behavior where `SQSBatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index a9a5a129e63..3508e2190e3 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -548,7 +548,7 @@ This means that we will raise **`IdempotencyKeyError`** if the evaluation of **` ### Customizing boto configuration -You can provide a custom boto configuration via **`boto_config`**, or an existing boto session via **`boto3_session`** parameters, when constructing the persistence store. +The **`boto_config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when constructing the persistence store. 
=== "Custom session" diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md index 081d22817ab..51fd0196abd 100644 --- a/docs/utilities/parameters.md +++ b/docs/utilities/parameters.md @@ -501,3 +501,40 @@ Here is the mapping between this utility's functions and methods and the underly | DynamoDB | `DynamoDBProvider.get` | `dynamodb` | ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [get_item](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.get_item) | DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb` | ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [query](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.query) | App Config | `get_app_config` | `appconfig` | [get_configuration](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/appconfig.html#AppConfig.Client.get_configuration) | + + +### Customizing boto configuration + +The **`config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when constructing any of the built-in provider classes. + +> **Example** + + +=== "Custom session" + + ```python hl_lines="2 4 5" + from aws_lambda_powertools.utilities import parameters + import boto3 + + boto3_session = boto3.session.Session() + ssm_provider = parameters.SSMProvider(boto3_session=boto3_session) + + def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter") + ... + ``` +=== "Custom config" + + ```python hl_lines="2 4 5" + from aws_lambda_powertools.utilities import parameters + from botocore.config import Config + + boto_config = Config() + ssm_provider = parameters.SSMProvider(config=boto_config) + + def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter") + ... 
+ ``` From 2fc7c42202d614002281b2a3de10469995a8acfa Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 1 Oct 2021 16:43:40 +0200 Subject: [PATCH 25/40] feat(feature-flags): get_raw_configuration property in Store (#720) --- .../utilities/feature_flags/appconfig.py | 48 +++++++++++-------- .../utilities/feature_flags/base.py | 13 ++++- .../utilities/feature_flags/feature_flags.py | 10 ++-- docs/utilities/feature_flags.md | 21 ++++++++ .../feature_flags/test_feature_flags.py | 12 +++++ 5 files changed, 77 insertions(+), 27 deletions(-) diff --git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py index 2e0edc3b9b1..df3f83c47aa 100644 --- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py +++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py @@ -55,9 +55,31 @@ def __init__( self.jmespath_options = jmespath_options self._conf_store = AppConfigProvider(environment=environment, application=application, config=sdk_config) + @property + def get_raw_configuration(self) -> Dict[str, Any]: + """Fetch feature schema configuration from AWS AppConfig""" + try: + # parse result conf as JSON, keep in cache for self.max_age seconds + return cast( + dict, + self._conf_store.get( + name=self.name, + transform=TRANSFORM_TYPE, + max_age=self.cache_seconds, + ), + ) + except (GetParameterError, TransformParameterError) as exc: + err_msg = traceback.format_exc() + if "AccessDenied" in err_msg: + raise StoreClientError(err_msg) from exc + raise ConfigurationStoreError("Unable to get AWS AppConfig configuration file") from exc + def get_configuration(self) -> Dict[str, Any]: """Fetch feature schema configuration from AWS AppConfig + If envelope is set, it'll extract and return feature flags from configuration, + otherwise it'll return the entire configuration fetched from AWS AppConfig. 
+ Raises ------ ConfigurationStoreError @@ -68,25 +90,11 @@ def get_configuration(self) -> Dict[str, Any]: Dict[str, Any] parsed JSON dictionary """ - try: - # parse result conf as JSON, keep in cache for self.max_age seconds - config = cast( - dict, - self._conf_store.get( - name=self.name, - transform=TRANSFORM_TYPE, - max_age=self.cache_seconds, - ), - ) + config = self.get_raw_configuration - if self.envelope: - config = jmespath_utils.extract_data_from_envelope( - data=config, envelope=self.envelope, jmespath_options=self.jmespath_options - ) + if self.envelope: + config = jmespath_utils.extract_data_from_envelope( + data=config, envelope=self.envelope, jmespath_options=self.jmespath_options + ) - return config - except (GetParameterError, TransformParameterError) as exc: - err_msg = traceback.format_exc() - if "AccessDenied" in err_msg: - raise StoreClientError(err_msg) from exc - raise ConfigurationStoreError("Unable to get AWS AppConfig configuration file") from exc + return config diff --git a/aws_lambda_powertools/utilities/feature_flags/base.py b/aws_lambda_powertools/utilities/feature_flags/base.py index edb94c4f45d..e323f32d8b1 100644 --- a/aws_lambda_powertools/utilities/feature_flags/base.py +++ b/aws_lambda_powertools/utilities/feature_flags/base.py @@ -3,10 +3,19 @@ class StoreProvider(ABC): + @property + @abstractmethod + def get_raw_configuration(self) -> Dict[str, Any]: + """Get configuration from any store and return the parsed JSON dictionary""" + raise NotImplementedError() # pragma: no cover + @abstractmethod def get_configuration(self) -> Dict[str, Any]: """Get configuration from any store and return the parsed JSON dictionary + If envelope is set, it'll extract and return feature flags from configuration, + otherwise it'll return the entire configuration fetched from the store. + Raises ------ ConfigurationStoreError @@ -42,10 +51,10 @@ def get_configuration(self) -> Dict[str, Any]: } ``` """ - return NotImplemented # pragma: no cover + raise NotImplementedError() # pragma: no cover class BaseValidator(ABC): @abstractmethod def validate(self): - return NotImplemented # pragma: no cover + raise NotImplementedError() # pragma: no cover diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index d04e74ff293..d26144a262a 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Dict, List, Optional, cast from . import schema from .base import StoreProvider @@ -36,7 +36,7 @@ def __init__(self, store: StoreProvider): store: StoreProvider Store to use to fetch feature flag schema configuration. """ - self._store = store + self.store = store @staticmethod def _match_by_action(action: str, condition_value: Any, context_value: Any) -> bool: @@ -103,7 +103,7 @@ def _evaluate_rules( return feat_default return False - def get_configuration(self) -> Union[Dict[str, Dict], Dict]: + def get_configuration(self) -> Dict: """Get validated feature flag schema from configured store. Largely used to aid testing, since it's called by `evaluate` and `get_enabled_features` methods. 
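Because `StoreProvider` now raises `NotImplementedError` for both abstract members, any concrete store has to implement `get_raw_configuration` as a property and `get_configuration` as a method. A minimal in-memory sketch of that interface, useful mainly in tests; the class name and schema are illustrative, and the AppConfig store remains the built-in provider:

```python
from typing import Any, Dict

from aws_lambda_powertools.utilities.feature_flags import FeatureFlags
from aws_lambda_powertools.utilities.feature_flags.base import StoreProvider


class InMemoryStore(StoreProvider):
    """Illustrative store backed by a plain dict, e.g. for unit tests."""

    def __init__(self, schema: Dict[str, Any]):
        super().__init__()
        self._schema = schema

    @property
    def get_raw_configuration(self) -> Dict[str, Any]:
        return self._schema

    def get_configuration(self) -> Dict[str, Any]:
        # no envelope handling in this sketch; return the schema as-is
        return self._schema


feature_flags = FeatureFlags(store=InMemoryStore({"my_feature": {"default": True}}))
assert feature_flags.evaluate(name="my_feature", default=False)
```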
@@ -146,8 +146,8 @@ def get_configuration(self) -> Union[Dict[str, Dict], Dict]: ``` """ # parse result conf as JSON, keep in cache for max age defined in store - logger.debug(f"Fetching schema from registered store, store={self._store}") - config = self._store.get_configuration() + logger.debug(f"Fetching schema from registered store, store={self.store}") + config: Dict = self.store.get_configuration() validator = schema.SchemaValidator(schema=config) validator.validate() diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index d22f9c03296..a400bda8e0c 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -529,6 +529,27 @@ For this to work, you need to use a JMESPath expression via the `envelope` param } ``` +### Getting fetched configuration + +You can access the configuration fetched from the store via `get_raw_configuration` property within the store instance. + +=== "app.py" + + ```python hl_lines="12" + from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore + + app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="configuration", + envelope = "feature_flags" + ) + + feature_flags = FeatureFlags(store=app_config) + + config = app_config.get_raw_configuration + ``` + ### Built-in store provider !!! info "For GA, you'll be able to bring your own store." diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index 5342105da3d..8b6698a8179 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -587,3 +587,15 @@ def test_get_feature_toggle_propagates_access_denied_error(mocker, config): # THEN raise StoreClientError error with pytest.raises(StoreClientError, match="AccessDeniedException") as err: feature_flags.evaluate(name="Foo", default=False) + + +def test_get_configuration_with_envelope_and_raw(mocker, config): + expected_value = True + mocked_app_config_schema = {"log_level": "INFO", "features": {"my_feature": {"default": expected_value}}} + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config, envelope="features") + + features_config = feature_flags.get_configuration() + config = feature_flags.store.get_raw_configuration + + assert "log_level" in config + assert "log_level" not in features_config From 19b152621df9ec96e9e794c82f3af6628949c259 Mon Sep 17 00:00:00 2001 From: Tom McCarthy Date: Fri, 1 Oct 2021 17:03:11 +0200 Subject: [PATCH 26/40] feat(idempotency): makes customers unit testing easier (#719) Co-authored-by: Heitor Lessa --- aws_lambda_powertools/shared/constants.py | 2 + .../utilities/idempotency/idempotency.py | 8 ++ .../idempotency/persistence/dynamodb.py | 29 ++++- docs/utilities/idempotency.md | 117 ++++++++++++++++++ .../idempotency/test_idempotency.py | 23 ++++ 5 files changed, 174 insertions(+), 5 deletions(-) diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index 622ffbce47b..45b46d236f9 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -21,3 +21,5 @@ XRAY_SDK_MODULE: str = "aws_xray_sdk" XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core" + +IDEMPOTENCY_DISABLED_ENV: str = "POWERTOOLS_IDEMPOTENCY_DISABLED" diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py index 06c9a578aa2..6984cfbbd8e 100644 
--- a/aws_lambda_powertools/utilities/idempotency/idempotency.py +++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py @@ -3,9 +3,11 @@ """ import functools import logging +import os from typing import Any, Callable, Dict, Optional, cast from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.shared.constants import IDEMPOTENCY_DISABLED_ENV from aws_lambda_powertools.shared.types import AnyCallableT from aws_lambda_powertools.utilities.idempotency.base import IdempotencyHandler from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig @@ -56,6 +58,9 @@ def idempotent( >>> return {"StatusCode": 200} """ + if os.getenv(IDEMPOTENCY_DISABLED_ENV): + return handler(event, context) + config = config or IdempotencyConfig() args = event, context idempotency_handler = IdempotencyHandler( @@ -122,6 +127,9 @@ def process_order(customer_id: str, order: dict, **kwargs): @functools.wraps(function) def decorate(*args, **kwargs): + if os.getenv(IDEMPOTENCY_DISABLED_ENV): + return function(*args, **kwargs) + payload = kwargs.get(data_keyword_argument) if payload is None: diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index c1ed29c6fd3..0ce307ab503 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -62,11 +62,11 @@ def __init__( >>> return {"StatusCode": 200} """ - boto_config = boto_config or Config() - session = boto3_session or boto3.session.Session() - self._ddb_resource = session.resource("dynamodb", config=boto_config) + self._boto_config = boto_config or Config() + self._boto3_session = boto3_session or boto3.session.Session() + + self._table = None self.table_name = table_name - self.table = self._ddb_resource.Table(self.table_name) self.key_attr = key_attr self.expiry_attr = expiry_attr self.status_attr = status_attr @@ -74,6 +74,25 @@ def __init__( self.validation_key_attr = validation_key_attr super(DynamoDBPersistenceLayer, self).__init__() + @property + def table(self): + """ + Caching property to store boto3 dynamodb Table resource + + """ + if self._table: + return self._table + ddb_resource = self._boto3_session.resource("dynamodb", config=self._boto_config) + self._table = ddb_resource.Table(self.table_name) + return self._table + + @table.setter + def table(self, table): + """ + Allow table instance variable to be set directly, primarily for use in tests + """ + self._table = table + def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: """ Translate raw item records from DynamoDB to DataRecord @@ -125,7 +144,7 @@ def _put_record(self, data_record: DataRecord) -> None: ExpressionAttributeNames={"#id": self.key_attr, "#now": self.expiry_attr}, ExpressionAttributeValues={":now": int(now.timestamp())}, ) - except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: + except self.table.meta.client.exceptions.ConditionalCheckFailedException: logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") raise IdempotencyItemAlreadyExistsError diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 3508e2190e3..bf06e3292b7 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -765,6 +765,123 @@ The idempotency utility can be used with the `validator` decorator. Ensure that !!! 
tip "JMESPath Powertools functions are also available" Built-in functions known in the validation utility like `powertools_json`, `powertools_base64`, `powertools_base64_gzip` are also available to use in this utility. + +## Testing your code + +The idempotency utility provides several routes to test your code. + +### Disabling the idempotency utility +When testing your code, you may wish to disable the idempotency logic altogether and focus on testing your business logic. To do this, you can set the environment variable `POWERTOOLS_IDEMPOTENCY_DISABLED` +with a truthy value. If you prefer setting this for specific tests, and are using Pytest, you can use [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html) fixture: + +=== "tests.py" + + ```python hl_lines="2 3" + def test_idempotent_lambda_handler(monkeypatch): + # Set POWERTOOLS_IDEMPOTENCY_DISABLED before calling decorated functions + monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", 1) + + result = handler() + ... + ``` +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, idempotent + ) + + persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + @idempotent(persistence_store=persistence_layer) + def handler(event, context): + print('expensive operation') + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + ``` + +### Testing with DynamoDB Local + +To test with [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.DownloadingAndRunning.html), you can replace the `Table` resource used by the persistence layer with one you create inside your tests. This allows you to set the endpoint_url. + +=== "tests.py" + + ```python hl_lines="6 7 8" + import boto3 + + import app + + def test_idempotent_lambda(): + # Create our own Table resource using the endpoint for our DynamoDB Local instance + resource = boto3.resource("dynamodb", endpoint_url='http://localhost:8000') + table = resource.Table(app.persistence_layer.table_name) + app.persistence_layer.table = table + + result = app.handler({'testkey': 'testvalue'}, {}) + assert result['payment_id'] == 12345 + ``` + +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, idempotent + ) + + persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + @idempotent(persistence_store=persistence_layer) + def handler(event, context): + print('expensive operation') + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + ``` + +### How do I mock all DynamoDB I/O operations + +The idempotency utility lazily creates the dynamodb [Table](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table) which it uses to access DynamoDB. +This means it is possible to pass a mocked Table resource, or stub various methods. + +=== "tests.py" + + ```python hl_lines="6 7 8 9" + from unittest.mock import MagicMock + + import app + + def test_idempotent_lambda(): + table = MagicMock() + app.persistence_layer.table = table + result = app.handler({'testkey': 'testvalue'}, {}) + table.put_item.assert_called() + ... 
+ ``` + +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.idempotency import ( + DynamoDBPersistenceLayer, idempotent + ) + + persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + @idempotent(persistence_store=persistence_layer) + def handler(event, context): + print('expensive operation') + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } + ``` + ## Extra resources If you're interested in a deep dive on how Amazon uses idempotency when building our APIs, check out diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index cb0d43ae6fa..b1d0914d181 100644 --- a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -3,6 +3,7 @@ import json import sys from hashlib import md5 +from unittest.mock import MagicMock import jmespath import pytest @@ -994,3 +995,25 @@ def dummy(payload): # WHEN dummy(payload=data_two) + + +def test_idempotency_disabled_envvar(monkeypatch, lambda_context, persistence_store: DynamoDBPersistenceLayer): + # Scenario to validate no requests sent to dynamodb table when 'POWERTOOLS_IDEMPOTENCY_DISABLED' is set + mock_event = {"data": "value"} + + persistence_store.table = MagicMock() + + monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", "1") + + @idempotent_function(data_keyword_argument="data", persistence_store=persistence_store) + def dummy(data): + return {"message": "hello"} + + @idempotent(persistence_store=persistence_store) + def dummy_handler(event, context): + return {"message": "hi"} + + dummy(data=mock_event) + dummy_handler(mock_event, lambda_context) + + assert len(persistence_store.table.method_calls) == 0 From c837e0ac7c6f63776e5c464450bca85c5227e787 Mon Sep 17 00:00:00 2001 From: Gerald Leter Date: Fri, 1 Oct 2021 10:44:10 -0500 Subject: [PATCH 27/40] feat(feature-flags): improve "IN/NOT_IN"; new rule actions (#710) Co-authored-by: Gerald W. 
Lester Co-authored-by: heitorlessa --- .../utilities/feature_flags/feature_flags.py | 4 + .../utilities/feature_flags/schema.py | 8 +- docs/utilities/feature_flags.md | 25 ++- .../feature_flags/test_feature_flags.py | 203 ++++++++++++++++++ .../feature_flags/test_schema_validation.py | 20 ++ 5 files changed, 256 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index d26144a262a..a4622fa9272 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -48,6 +48,10 @@ def _match_by_action(action: str, condition_value: Any, context_value: Any) -> b schema.RuleAction.ENDSWITH.value: lambda a, b: a.endswith(b), schema.RuleAction.IN.value: lambda a, b: a in b, schema.RuleAction.NOT_IN.value: lambda a, b: a not in b, + schema.RuleAction.KEY_IN_VALUE.value: lambda a, b: a in b, + schema.RuleAction.KEY_NOT_IN_VALUE.value: lambda a, b: a not in b, + schema.RuleAction.VALUE_IN_KEY.value: lambda a, b: b in a, + schema.RuleAction.VALUE_NOT_IN_KEY.value: lambda a, b: b not in a, } try: diff --git a/aws_lambda_powertools/utilities/feature_flags/schema.py b/aws_lambda_powertools/utilities/feature_flags/schema.py index efce82018db..68f8ebd9bca 100644 --- a/aws_lambda_powertools/utilities/feature_flags/schema.py +++ b/aws_lambda_powertools/utilities/feature_flags/schema.py @@ -22,6 +22,10 @@ class RuleAction(str, Enum): ENDSWITH = "ENDSWITH" IN = "IN" NOT_IN = "NOT_IN" + KEY_IN_VALUE = "KEY_IN_VALUE" + KEY_NOT_IN_VALUE = "KEY_NOT_IN_VALUE" + VALUE_IN_KEY = "VALUE_IN_KEY" + VALUE_NOT_IN_KEY = "VALUE_NOT_IN_KEY" class SchemaValidator(BaseValidator): @@ -80,7 +84,9 @@ class SchemaValidator(BaseValidator): The value MUST contain the following members: * **action**: `str`. Operation to perform to match a key and value. - The value MUST be either EQUALS, STARTSWITH, ENDSWITH, IN, NOT_IN + The value MUST be either EQUALS, STARTSWITH, ENDSWITH, + KEY_IN_VALUE KEY_NOT_IN_VALUE VALUE_IN_KEY VALUE_NOT_IN_KEY + * **key**: `str`. Key in given context to perform operation * **value**: `Any`. Value in given context that should match action operation. diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index a400bda8e0c..2836fb1759f 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -366,7 +366,7 @@ You can use `get_enabled_features` method for scenarios where you need a list of "when_match": true, "conditions": [ { - "action": "IN", + "action": "KEY_IN_VALUE", "key": "CloudFront-Viewer-Country", "value": ["NL", "IE", "UK", "PL", "PT"] } @@ -450,9 +450,20 @@ The `conditions` block is a list of conditions that contain `action`, `key`, and } ``` -The `action` configuration can have 5 different values: `EQUALS`, `STARTSWITH`, `ENDSWITH`, `IN`, `NOT_IN`. +The `action` configuration can have the following values, where the expressions **`a`** is the `key` and **`b`** is the `value` above: -The `key` and `value` will be compared to the input from the context parameter. 
+Action | Equivalent expression +------------------------------------------------- | --------------------------------------------------------------------------------- +**EQUALS** | `lambda a, b: a == b` +**STARTSWITH** | `lambda a, b: a.startswith(b)` +**ENDSWITH** | `lambda a, b: a.endswith(b)` +**KEY_IN_VALUE** | `lambda a, b: a in b` +**KEY_NOT_IN_VALUE** | `lambda a, b: a not in b` +**VALUE_IN_KEY** | `lambda a, b: b in a` +**VALUE_NOT_IN_KEY** | `lambda a, b: b not in a` + + +!!! info "The `**key**` and `**value**` will be compared to the input from the `**context**` parameter." **For multiple conditions**, we will evaluate the list of conditions as a logical `AND`, so all conditions needs to match to return `when_match` value. @@ -671,3 +682,11 @@ Method | When to use | Requires new deployment on changes | Supported services **[Environment variables](https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html){target="_blank"}** | Simple configuration that will rarely if ever change, because changing it requires a Lambda function deployment. | Yes | Lambda **[Parameters utility](parameters.md)** | Access to secrets, or fetch parameters in different formats from AWS System Manager Parameter Store or Amazon DynamoDB. | No | Parameter Store, DynamoDB, Secrets Manager, AppConfig **Feature flags utility** | Rule engine to define when one or multiple features should be enabled depending on the input. | No | AppConfig + + +## Deprecation list when GA + +Breaking change | Recommendation +------------------------------------------------- | --------------------------------------------------------------------------------- +`IN` RuleAction | Use `KEY_IN_VALUE` instead +`NOT_IN` RuleAction | Use `KEY_NOT_IN_VALUE` instead diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index 8b6698a8179..f6ce93abaa9 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -301,6 +301,8 @@ def test_flags_conditions_rule_match_multiple_actions_multiple_rules_multiple_co # check a case where the feature exists but the rule doesn't match so we revert to the default value of the feature + +# Check IN/NOT_IN/KEY_IN_VALUE/KEY_NOT_IN_VALUE/VALUE_IN_KEY/VALUE_NOT_IN_KEY conditions def test_flags_match_rule_with_in_action(mocker, config): expected_value = True mocked_app_config_schema = { @@ -397,6 +399,207 @@ def test_flags_no_match_rule_with_not_in_action(mocker, config): assert toggle == expected_value +def test_flags_match_rule_with_key_in_value_action(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": False, + "rules": { + "tenant id is contained in [6, 2]": { + "when_match": expected_value, + "conditions": [ + { + "action": RuleAction.KEY_IN_VALUE.value, + "key": "tenant_id", + "value": ["6", "2"], + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "6", "username": "a"}, default=False) + assert toggle == expected_value + + +def test_flags_no_match_rule_with_key_in_value_action(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id is contained in [8, 2]": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_IN_VALUE.value, + "key": "tenant_id", + "value": ["8", "2"], + } + ], + 
} + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "6", "username": "a"}, default=False) + assert toggle == expected_value + + +def test_flags_match_rule_with_key_not_in_value_action(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": False, + "rules": { + "tenant id is contained in [8, 2]": { + "when_match": expected_value, + "conditions": [ + { + "action": RuleAction.KEY_NOT_IN_VALUE.value, + "key": "tenant_id", + "value": ["10", "4"], + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "6", "username": "a"}, default=False) + assert toggle == expected_value + + +def test_flags_no_match_rule_with_key_not_in_value_action(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id is contained in [8, 2]": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_NOT_IN_VALUE.value, + "key": "tenant_id", + "value": ["6", "4"], + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "6", "username": "a"}, default=False) + assert toggle == expected_value + + +def test_flags_match_rule_with_value_in_key_action(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": False, + "rules": { + "user is in the SYSADMIN group": { + "when_match": expected_value, + "conditions": [ + { + "action": RuleAction.VALUE_IN_KEY.value, + "key": "groups", + "value": "SYSADMIN", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate( + name="my_feature", context={"tenant_id": "6", "username": "a", "groups": ["SYSADMIN", "IT"]}, default=False + ) + assert toggle == expected_value + + +def test_flags_no_match_rule_with_value_in_key_action(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id is contained in [8, 2]": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.VALUE_IN_KEY.value, + "key": "groups", + "value": "GUEST", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate( + name="my_feature", context={"tenant_id": "6", "username": "a", "groups": ["SYSADMIN", "IT"]}, default=False + ) + assert toggle == expected_value + + +def test_flags_match_rule_with_value_not_in_key_action(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": False, + "rules": { + "user is in the GUEST group": { + "when_match": expected_value, + "conditions": [ + { + "action": RuleAction.VALUE_NOT_IN_KEY.value, + "key": "groups", + "value": "GUEST", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate( + name="my_feature", context={"tenant_id": "6", "username": "a", "groups": ["SYSADMIN", "IT"]}, default=False + ) + assert toggle == expected_value + + +def test_flags_no_match_rule_with_value_not_in_key_action(mocker, config): + expected_value = False + mocked_app_config_schema = 
{ + "my_feature": { + "default": expected_value, + "rules": { + "user is in the SYSADMIN group": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.VALUE_NOT_IN_KEY.value, + "key": "groups", + "value": "SYSADMIN", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate( + name="my_feature", context={"tenant_id": "6", "username": "a", "groups": ["SYSADMIN", "IT"]}, default=False + ) + assert toggle == expected_value + + +# Check multiple features def test_multiple_features_enabled(mocker, config): expected_value = ["my_feature", "my_feature2"] mocked_app_config_schema = { diff --git a/tests/functional/feature_flags/test_schema_validation.py b/tests/functional/feature_flags/test_schema_validation.py index ce85494afce..1cd14aa4287 100644 --- a/tests/functional/feature_flags/test_schema_validation.py +++ b/tests/functional/feature_flags/test_schema_validation.py @@ -220,6 +220,26 @@ def test_valid_condition_all_actions(): CONDITION_KEY: "username", CONDITION_VALUE: ["c"], }, + { + CONDITION_ACTION: RuleAction.KEY_IN_VALUE.value, + CONDITION_KEY: "username", + CONDITION_VALUE: ["a", "b"], + }, + { + CONDITION_ACTION: RuleAction.KEY_NOT_IN_VALUE.value, + CONDITION_KEY: "username", + CONDITION_VALUE: ["c"], + }, + { + CONDITION_ACTION: RuleAction.VALUE_IN_KEY.value, + CONDITION_KEY: "groups", + CONDITION_VALUE: "SYSADMIN", + }, + { + CONDITION_ACTION: RuleAction.VALUE_NOT_IN_KEY.value, + CONDITION_KEY: "groups", + CONDITION_VALUE: "GUEST", + }, ], } }, From 18450531b9c61bcc6b747aa1b75356966b8393ed Mon Sep 17 00:00:00 2001 From: Gerald Leter Date: Fri, 1 Oct 2021 11:28:56 -0500 Subject: [PATCH 28/40] feat(feature-flags): Bring your own logger for debug (#709) Co-authored-by: Dani Comnea Co-authored-by: heitorlessa Co-authored-by: Gerald W. Lester --- .../utilities/feature_flags/appconfig.py | 13 +++-- .../utilities/feature_flags/feature_flags.py | 47 ++++++++++--------- .../utilities/feature_flags/schema.py | 29 +++++++----- docs/utilities/feature_flags.md | 1 + 4 files changed, 53 insertions(+), 37 deletions(-) diff --git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py index df3f83c47aa..ff688dc6be5 100644 --- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py +++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py @@ -1,17 +1,16 @@ import logging import traceback -from typing import Any, Dict, Optional, cast +from typing import Any, Dict, Optional, Union, cast from botocore.config import Config from aws_lambda_powertools.utilities.parameters import AppConfigProvider, GetParameterError, TransformParameterError +from ... import Logger from ...shared import jmespath_utils from .base import StoreProvider from .exceptions import ConfigurationStoreError, StoreClientError -logger = logging.getLogger(__name__) - TRANSFORM_TYPE = "json" @@ -25,6 +24,7 @@ def __init__( sdk_config: Optional[Config] = None, envelope: Optional[str] = "", jmespath_options: Optional[Dict] = None, + logger: Optional[Union[logging.Logger, Logger]] = None, ): """This class fetches JSON schemas from AWS AppConfig @@ -44,8 +44,11 @@ def __init__( JMESPath expression to pluck feature flags data from config jmespath_options : Optional[Dict] Alternative JMESPath options to be included when filtering expr + logger: A logging object + Used to log messages. If None is supplied, one will be created. 
""" super().__init__() + self.logger = logger or logging.getLogger(__name__) self.environment = environment self.application = application self.name = name @@ -60,6 +63,9 @@ def get_raw_configuration(self) -> Dict[str, Any]: """Fetch feature schema configuration from AWS AppConfig""" try: # parse result conf as JSON, keep in cache for self.max_age seconds + self.logger.debug( + "Fetching configuration from the store", extra={"param_name": self.name, "max_age": self.cache_seconds} + ) return cast( dict, self._conf_store.get( @@ -93,6 +99,7 @@ def get_configuration(self) -> Dict[str, Any]: config = self.get_raw_configuration if self.envelope: + self.logger.debug("Envelope enabled; extracting data from config", extra={"envelope": self.envelope}) config = jmespath_utils.extract_data_from_envelope( data=config, envelope=self.envelope, jmespath_options=self.jmespath_options ) diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index a4622fa9272..01d3ce13639 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -1,15 +1,14 @@ import logging -from typing import Any, Dict, List, Optional, cast +from typing import Any, Dict, List, Optional, Union, cast +from ... import Logger from . import schema from .base import StoreProvider from .exceptions import ConfigurationStoreError -logger = logging.getLogger(__name__) - class FeatureFlags: - def __init__(self, store: StoreProvider): + def __init__(self, store: StoreProvider, logger: Optional[Union[logging.Logger, Logger]] = None): """Evaluates whether feature flags should be enabled based on a given context. It uses the provided store to fetch feature flag rules before evaluating them. @@ -35,11 +34,13 @@ def __init__(self, store: StoreProvider): ---------- store: StoreProvider Store to use to fetch feature flag schema configuration. + logger: A logging object + Used to log messages. If None is supplied, one will be created. 
""" self.store = store + self.logger = logger or logging.getLogger(__name__) - @staticmethod - def _match_by_action(action: str, condition_value: Any, context_value: Any) -> bool: + def _match_by_action(self, action: str, condition_value: Any, context_value: Any) -> bool: if not context_value: return False mapping_by_action = { @@ -58,7 +59,7 @@ def _match_by_action(action: str, condition_value: Any, context_value: Any) -> b func = mapping_by_action.get(action, lambda a, b: False) return func(context_value, condition_value) except Exception as exc: - logger.debug(f"caught exception while matching action: action={action}, exception={str(exc)}") + self.logger.debug(f"caught exception while matching action: action={action}, exception={str(exc)}") return False def _evaluate_conditions( @@ -69,7 +70,7 @@ def _evaluate_conditions( conditions = cast(List[Dict], rule.get(schema.CONDITIONS_KEY)) if not conditions: - logger.debug( + self.logger.debug( f"rule did not match, no conditions to match, rule_name={rule_name}, rule_value={rule_match_value}, " f"name={feature_name} " ) @@ -81,13 +82,13 @@ def _evaluate_conditions( cond_value = condition.get(schema.CONDITION_VALUE) if not self._match_by_action(action=cond_action, condition_value=cond_value, context_value=context_value): - logger.debug( + self.logger.debug( f"rule did not match action, rule_name={rule_name}, rule_value={rule_match_value}, " f"name={feature_name}, context_value={str(context_value)} " ) return False # context doesn't match condition - logger.debug(f"rule matched, rule_name={rule_name}, rule_value={rule_match_value}, name={feature_name}") + self.logger.debug(f"rule matched, rule_name={rule_name}, rule_value={rule_match_value}, name={feature_name}") return True def _evaluate_rules( @@ -98,12 +99,16 @@ def _evaluate_rules( rule_match_value = rule.get(schema.RULE_MATCH_VALUE) # Context might contain PII data; do not log its value - logger.debug(f"Evaluating rule matching, rule={rule_name}, feature={feature_name}, default={feat_default}") + self.logger.debug( + f"Evaluating rule matching, rule={rule_name}, feature={feature_name}, default={feat_default}" + ) if self._evaluate_conditions(rule_name=rule_name, feature_name=feature_name, rule=rule, context=context): return bool(rule_match_value) # no rule matched, return default value of feature - logger.debug(f"no rule matched, returning feature default, default={feat_default}, name={feature_name}") + self.logger.debug( + f"no rule matched, returning feature default, default={feat_default}, name={feature_name}" + ) return feat_default return False @@ -150,7 +155,7 @@ def get_configuration(self) -> Dict: ``` """ # parse result conf as JSON, keep in cache for max age defined in store - logger.debug(f"Fetching schema from registered store, store={self.store}") + self.logger.debug(f"Fetching schema from registered store, store={self.store}") config: Dict = self.store.get_configuration() validator = schema.SchemaValidator(schema=config) validator.validate() @@ -194,21 +199,21 @@ def evaluate(self, *, name: str, context: Optional[Dict[str, Any]] = None, defau try: features = self.get_configuration() except ConfigurationStoreError as err: - logger.debug(f"Failed to fetch feature flags from store, returning default provided, reason={err}") + self.logger.debug(f"Failed to fetch feature flags from store, returning default provided, reason={err}") return default feature = features.get(name) if feature is None: - logger.debug(f"Feature not found; returning default provided, name={name}, 
default={default}") + self.logger.debug(f"Feature not found; returning default provided, name={name}, default={default}") return default rules = feature.get(schema.RULES_KEY) feat_default = feature.get(schema.FEATURE_DEFAULT_VAL_KEY) if not rules: - logger.debug(f"no rules found, returning feature default, name={name}, default={feat_default}") + self.logger.debug(f"no rules found, returning feature default, name={name}, default={feat_default}") return bool(feat_default) - logger.debug(f"looking for rule match, name={name}, default={feat_default}") + self.logger.debug(f"looking for rule match, name={name}, default={feat_default}") return self._evaluate_rules(feature_name=name, context=context, feat_default=bool(feat_default), rules=rules) def get_enabled_features(self, *, context: Optional[Dict[str, Any]] = None) -> List[str]: @@ -245,20 +250,20 @@ def get_enabled_features(self, *, context: Optional[Dict[str, Any]] = None) -> L try: features: Dict[str, Any] = self.get_configuration() except ConfigurationStoreError as err: - logger.debug(f"Failed to fetch feature flags from store, returning empty list, reason={err}") + self.logger.debug(f"Failed to fetch feature flags from store, returning empty list, reason={err}") return features_enabled - logger.debug("Evaluating all features") + self.logger.debug("Evaluating all features") for name, feature in features.items(): rules = feature.get(schema.RULES_KEY, {}) feature_default_value = feature.get(schema.FEATURE_DEFAULT_VAL_KEY) if feature_default_value and not rules: - logger.debug(f"feature is enabled by default and has no defined rules, name={name}") + self.logger.debug(f"feature is enabled by default and has no defined rules, name={name}") features_enabled.append(name) elif self._evaluate_rules( feature_name=name, context=context, feat_default=feature_default_value, rules=rules ): - logger.debug(f"feature's calculated value is True, name={name}") + self.logger.debug(f"feature's calculated value is True, name={name}") features_enabled.append(name) return features_enabled diff --git a/aws_lambda_powertools/utilities/feature_flags/schema.py b/aws_lambda_powertools/utilities/feature_flags/schema.py index 68f8ebd9bca..fc745342750 100644 --- a/aws_lambda_powertools/utilities/feature_flags/schema.py +++ b/aws_lambda_powertools/utilities/feature_flags/schema.py @@ -1,12 +1,11 @@ import logging from enum import Enum -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union +from ... 
import Logger from .base import BaseValidator from .exceptions import SchemaValidationError -logger = logging.getLogger(__name__) - RULES_KEY = "rules" FEATURE_DEFAULT_VAL_KEY = "default" CONDITIONS_KEY = "conditions" @@ -111,11 +110,12 @@ class SchemaValidator(BaseValidator): ``` """ - def __init__(self, schema: Dict[str, Any]): + def __init__(self, schema: Dict[str, Any], logger: Optional[Union[logging.Logger, Logger]] = None): self.schema = schema + self.logger = logger or logging.getLogger(__name__) def validate(self) -> None: - logger.debug("Validating schema") + self.logger.debug("Validating schema") if not isinstance(self.schema, dict): raise SchemaValidationError(f"Features must be a dictionary, schema={str(self.schema)}") @@ -126,12 +126,13 @@ def validate(self) -> None: class FeaturesValidator(BaseValidator): """Validates each feature and calls RulesValidator to validate its rules""" - def __init__(self, schema: Dict): + def __init__(self, schema: Dict, logger: Optional[Union[logging.Logger, Logger]] = None): self.schema = schema + self.logger = logger or logging.getLogger(__name__) def validate(self): for name, feature in self.schema.items(): - logger.debug(f"Attempting to validate feature '{name}'") + self.logger.debug(f"Attempting to validate feature '{name}'") self.validate_feature(name, feature) rules = RulesValidator(feature=feature) rules.validate() @@ -149,21 +150,22 @@ def validate_feature(name, feature): class RulesValidator(BaseValidator): """Validates each rule and calls ConditionsValidator to validate each rule's conditions""" - def __init__(self, feature: Dict[str, Any]): + def __init__(self, feature: Dict[str, Any], logger: Optional[Union[logging.Logger, Logger]] = None): self.feature = feature self.feature_name = next(iter(self.feature)) self.rules: Optional[Dict] = self.feature.get(RULES_KEY) + self.logger = logger or logging.getLogger(__name__) def validate(self): if not self.rules: - logger.debug("Rules are empty, ignoring validation") + self.logger.debug("Rules are empty, ignoring validation") return if not isinstance(self.rules, dict): raise SchemaValidationError(f"Feature rules must be a dictionary, feature={self.feature_name}") for rule_name, rule in self.rules.items(): - logger.debug(f"Attempting to validate rule '{rule_name}'") + self.logger.debug(f"Attempting to validate rule '{rule_name}'") self.validate_rule(rule=rule, rule_name=rule_name, feature_name=self.feature_name) conditions = ConditionsValidator(rule=rule, rule_name=rule_name) conditions.validate() @@ -189,15 +191,18 @@ def validate_rule_default_value(rule: Dict, rule_name: str): class ConditionsValidator(BaseValidator): - def __init__(self, rule: Dict[str, Any], rule_name: str): + def __init__(self, rule: Dict[str, Any], rule_name: str, logger: Optional[Union[logging.Logger, Logger]] = None): self.conditions: List[Dict[str, Any]] = rule.get(CONDITIONS_KEY, {}) self.rule_name = rule_name + self.logger = logger or logging.getLogger(__name__) def validate(self): if not self.conditions or not isinstance(self.conditions, list): raise SchemaValidationError(f"Invalid condition, rule={self.rule_name}") for condition in self.conditions: + # Condition can contain PII data; do not log condition value + self.logger.debug(f"Attempting to validate condition for '{self.rule_name}'") self.validate_condition(rule_name=self.rule_name, condition=condition) @staticmethod @@ -205,8 +210,6 @@ def validate_condition(rule_name: str, condition: Dict[str, str]) -> None: if not condition or not isinstance(condition, 
dict): raise SchemaValidationError(f"Feature rule condition must be a dictionary, rule={rule_name}") - # Condition can contain PII data; do not log condition value - logger.debug(f"Attempting to validate condition for '{rule_name}'") ConditionsValidator.validate_condition_action(condition=condition, rule_name=rule_name) ConditionsValidator.validate_condition_key(condition=condition, rule_name=rule_name) ConditionsValidator.validate_condition_value(condition=condition, rule_name=rule_name) diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 2836fb1759f..7e08cc358dd 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -580,6 +580,7 @@ Parameter | Default | Description **max_age** | `5` | Number of seconds to cache feature flags configuration fetched from AWS AppConfig **sdk_config** | `None` | [Botocore Config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html){target="_blank"} **jmespath_options** | `None` | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} +**logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools Logger. === "appconfig_store_example.py" From ba68bc8704a467ffdc91e1d227aad2741c87d140 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 1 Oct 2021 18:40:56 +0200 Subject: [PATCH 29/40] fix(idempotency): sorting keys before hashing (#722) --- aws_lambda_powertools/logging/formatter.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index e35c9a7a327..246fa171d4e 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -222,7 +222,7 @@ def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict[str, Any]: record_dict["asctime"] = self.formatTime(record=log_record, datefmt=self.datefmt) extras = {k: v for k, v in record_dict.items() if k not in RESERVED_LOG_ATTRS} - formatted_log = {**extras} + formatted_log = {} # Iterate over a default or existing log structure # then replace any std log attribute e.g. '%(level)s' to 'INFO', '%(process)d to '4773' @@ -233,6 +233,7 @@ def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict[str, Any]: else: formatted_log[key] = value + formatted_log.update(**extras) return formatted_log @staticmethod From 610f56993ca9e264bd9af608c5f56535faf6313d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 1 Oct 2021 18:40:56 +0200 Subject: [PATCH 30/40] fix(logger): push extra keys to the end (#722) --- aws_lambda_powertools/logging/formatter.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index e35c9a7a327..246fa171d4e 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -222,7 +222,7 @@ def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict[str, Any]: record_dict["asctime"] = self.formatTime(record=log_record, datefmt=self.datefmt) extras = {k: v for k, v in record_dict.items() if k not in RESERVED_LOG_ATTRS} - formatted_log = {**extras} + formatted_log = {} # Iterate over a default or existing log structure # then replace any std log attribute e.g. 
'%(level)s' to 'INFO', '%(process)d to '4773' @@ -233,6 +233,7 @@ def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict[str, Any]: else: formatted_log[key] = value + formatted_log.update(**extras) return formatted_log @staticmethod From 47ae544963fbaedda2b65b55c1837f34dd898ba7 Mon Sep 17 00:00:00 2001 From: Ran Isenberg <60175085+risenberg-cyberark@users.noreply.github.com> Date: Sun, 3 Oct 2021 21:24:48 +0300 Subject: [PATCH 31/40] fix(feature-flags): rules should evaluate with an AND op (#724) --- .../utilities/feature_flags/feature_flags.py | 9 +++------ tests/functional/feature_flags/test_feature_flags.py | 8 ++++---- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index 01d3ce13639..5a91ba26840 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -105,12 +105,9 @@ def _evaluate_rules( if self._evaluate_conditions(rule_name=rule_name, feature_name=feature_name, rule=rule, context=context): return bool(rule_match_value) - # no rule matched, return default value of feature - self.logger.debug( - f"no rule matched, returning feature default, default={feat_default}, name={feature_name}" - ) - return feat_default - return False + # no rule matched, return default value of feature + self.logger.debug(f"no rule matched, returning feature default, default={feat_default}, name={feature_name}") + return feat_default def get_configuration(self) -> Dict: """Get validated feature flag schema from configured store. diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index f6ce93abaa9..73702de93b2 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -233,9 +233,9 @@ def test_flags_conditions_no_rule_match_equal_multiple_conditions(mocker, config # check rule match for multiple of action types def test_flags_conditions_rule_match_multiple_actions_multiple_rules_multiple_conditions(mocker, config): expected_value_first_check = True - expected_value_second_check = False + expected_value_second_check = True expected_value_third_check = False - expected_value_fourth_case = False + expected_value_fourth_check = False mocked_app_config_schema = { "my_feature": { "default": expected_value_third_check, @@ -295,9 +295,9 @@ def test_flags_conditions_rule_match_multiple_actions_multiple_rules_multiple_co toggle = feature_flags.evaluate( name="my_fake_feature", context={"tenant_id": "11114446", "username": "ab"}, - default=expected_value_fourth_case, + default=expected_value_fourth_check, ) - assert toggle == expected_value_fourth_case + assert toggle == expected_value_fourth_check # check a case where the feature exists but the rule doesn't match so we revert to the default value of the feature From 922ecf27a11b7ca2f68ec097128041f9964da540 Mon Sep 17 00:00:00 2001 From: Gerald Leter Date: Sun, 3 Oct 2021 14:37:47 -0500 Subject: [PATCH 32/40] feat(feature_flags): Added inequality conditions (#721) --- .../utilities/feature_flags/feature_flags.py | 5 + .../utilities/feature_flags/schema.py | 5 + docs/utilities/feature_flags.md | 5 + .../feature_flags/test_feature_flags.py | 335 ++++++++++++++++++ 4 files changed, 350 insertions(+) diff --git a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py 
b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py index 5a91ba26840..c66feee0536 100644 --- a/aws_lambda_powertools/utilities/feature_flags/feature_flags.py +++ b/aws_lambda_powertools/utilities/feature_flags/feature_flags.py @@ -45,6 +45,11 @@ def _match_by_action(self, action: str, condition_value: Any, context_value: Any return False mapping_by_action = { schema.RuleAction.EQUALS.value: lambda a, b: a == b, + schema.RuleAction.NOT_EQUALS.value: lambda a, b: a != b, + schema.RuleAction.KEY_GREATER_THAN_VALUE.value: lambda a, b: a > b, + schema.RuleAction.KEY_GREATER_THAN_OR_EQUAL_VALUE.value: lambda a, b: a >= b, + schema.RuleAction.KEY_LESS_THAN_VALUE.value: lambda a, b: a < b, + schema.RuleAction.KEY_LESS_THAN_OR_EQUAL_VALUE.value: lambda a, b: a <= b, schema.RuleAction.STARTSWITH.value: lambda a, b: a.startswith(b), schema.RuleAction.ENDSWITH.value: lambda a, b: a.endswith(b), schema.RuleAction.IN.value: lambda a, b: a in b, diff --git a/aws_lambda_powertools/utilities/feature_flags/schema.py b/aws_lambda_powertools/utilities/feature_flags/schema.py index fc745342750..6a92508676e 100644 --- a/aws_lambda_powertools/utilities/feature_flags/schema.py +++ b/aws_lambda_powertools/utilities/feature_flags/schema.py @@ -17,6 +17,11 @@ class RuleAction(str, Enum): EQUALS = "EQUALS" + NOT_EQUALS = "NOT_EQUALS" + KEY_GREATER_THAN_VALUE = "KEY_GREATER_THAN_VALUE" + KEY_GREATER_THAN_OR_EQUAL_VALUE = "KEY_GREATER_THAN_OR_EQUAL_VALUE" + KEY_LESS_THAN_VALUE = "KEY_LESS_THAN_VALUE" + KEY_LESS_THAN_OR_EQUAL_VALUE = "KEY_LESS_THAN_OR_EQUAL_VALUE" STARTSWITH = "STARTSWITH" ENDSWITH = "ENDSWITH" IN = "IN" diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 7e08cc358dd..816aac8b817 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -455,6 +455,11 @@ The `action` configuration can have the following values, where the expressions Action | Equivalent expression ------------------------------------------------- | --------------------------------------------------------------------------------- **EQUALS** | `lambda a, b: a == b` +**NOT_EQUALS** | `lambda a, b: a != b` +**KEY_GREATER_THAN_VALUE** | `lambda a, b: a > b` +**KEY_GREATER_THAN_OR_EQUAL_VALUE** | `lambda a, b: a >= b` +**KEY_LESS_THAN_VALUE** | `lambda a, b: a < b` +**KEY_LESS_THAN_OR_EQUAL_VALUE** | `lambda a, b: a <= b` **STARTSWITH** | `lambda a, b: a.startswith(b)` **ENDSWITH** | `lambda a, b: a.endswith(b)` **KEY_IN_VALUE** | `lambda a, b: a in b` diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index 73702de93b2..c421cc85423 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -802,3 +802,338 @@ def test_get_configuration_with_envelope_and_raw(mocker, config): assert "log_level" in config assert "log_level" not in features_config + +## +## Inequality test cases +## + +# Test not equals +def test_flags_not_equal_no_match(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id not equals 345345435": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.NOT_EQUALS.value, + "key": "tenant_id", + "value": "345345435", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": 
"a"}, default=False) + assert toggle == expected_value + +def test_flags_not_equal_match(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id not equals 345345435": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.NOT_EQUALS.value, + "key": "tenant_id", + "value": "345345435", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "", "username": "a"}, default=False) + assert toggle == expected_value + + +# Test less than +def test_flags_less_than_no_match_1(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_LESS_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + assert toggle == expected_value + +def test_flags_less_than_no_match_2(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_LESS_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + assert toggle == expected_value + +def test_flags_less_than_match(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_LESS_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + assert toggle == expected_value + +# Test less than or equal to +def test_flags_less_than_or_equal_no_match(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_LESS_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + assert toggle == expected_value + +def test_flags_less_than_or_equal_match_1(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": 
RuleAction.KEY_LESS_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + assert toggle == expected_value + + +def test_flags_less_than_or_equal_match_2(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date less than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_LESS_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + assert toggle == expected_value + +# Test greater than +def test_flags_greater_than_no_match_1(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + assert toggle == expected_value + +def test_flags_greater_than_no_match_2(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + assert toggle == expected_value + +def test_flags_greater_than_match(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + assert toggle == expected_value + +# Test greater than or equal to +def test_flags_greater_than_or_equal_no_match(mocker, config): + expected_value = False + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = 
feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + assert toggle == expected_value + +def test_flags_greater_than_or_equal_match_1(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + assert toggle == expected_value + + +def test_flags_greater_than_or_equal_match_2(mocker, config): + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "Date greater than or equal 2021.10.31": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.KEY_GREATER_THAN_OR_EQUAL_VALUE.value, + "key": "current_date", + "value": "2021.10.31", + } + ], + } + }, + } + } + feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) + toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + assert toggle == expected_value + From b97d96975468e7c5759a4674dd3266acdb319b52 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 5 Oct 2021 10:43:28 +0200 Subject: [PATCH 33/40] docs(parser): fix incorrect import in root_validator example (#735) --- docs/utilities/parser.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 47f87e355bb..9f1bed3c0cb 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -448,7 +448,7 @@ Alternatively, you can pass `'*'` as an argument for the decorator so that you c === "validate_all_field_values.py" ```python - from aws_lambda_powertools.utilities.parser import parse, BaseModel, validator + from aws_lambda_powertools.utilities.parser import parse, BaseModel, root_validator class UserModel(BaseModel): username: str From 5fa43ca3f6f930f0520d44e22d6631ebdb942459 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 08:43:42 +0000 Subject: [PATCH 34/40] chore(deps-dev): bump mkdocs-material from 7.3.0 to 7.3.1 (#731) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 7.3.0 to 7.3.1.
Release notes

Sourced from mkdocs-material's releases.

mkdocs-material-7.3.1

  • Added new experimental content tabs implementation
  • Fixed #3069: GitHub stats broken for users/orgs (7.1.0 regression)
  • Fixed #3070: Sections not linking to index page
  • Fixed title not linking to index page when using tabs
  • Fixed Disqus integration when using instant loading
  • Fixed some spacing issues for right-to-left languages
  • Fixed syntax error in Serbian translations

--- poetry.lock | 16 ++++++++-------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 374ef8ad735..e5fe82ab49b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -577,7 +577,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.3.0" +version = "7.3.1" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -588,7 +588,7 @@ markdown = ">=3.2" mkdocs = ">=1.2.2" mkdocs-material-extensions = ">=1.0" Pygments = ">=2.4" -pymdown-extensions = ">=7.0" +pymdown-extensions = ">=9.0" [[package]] name = "mkdocs-material-extensions" @@ -730,7 +730,7 @@ python-versions = ">=3.5" [[package]] name = "pymdown-extensions" -version = "8.2" +version = "9.0" description = "Extension pack for Python Markdown." category = "dev" optional = false @@ -1047,7 +1047,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "c55d59c37d135eeba1ae059cbfe702c6167dd68645d1001bc90482dddfde8ab4" +content-hash = "051e2505e2df84e7715e8c160fac3fb984a1e274ca2cbff43fc36962538c2e43" [metadata.files] appdirs = [ @@ -1337,8 +1337,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.3.0.tar.gz", hash = "sha256:07db0580fa96c3473aee99ec3fb4606a1a5a1e4f4467e64c0cd1ba8da5b6476e"}, - {file = "mkdocs_material-7.3.0-py2.py3-none-any.whl", hash = "sha256:b183c27dc0f44e631bbc32c51057f61a3e2ba8b3c1080e59f944167eeba9ff1d"}, + {file = "mkdocs-material-7.3.1.tar.gz", hash = "sha256:d1ab269da2025f22b8fba079d7eadc05cd97ac2a21d87b09d414e69915f247a7"}, + {file = "mkdocs_material-7.3.1-py2.py3-none-any.whl", hash = "sha256:8d59c8ac241d59eef1a883c49ca685c8d8446eb054675a212fb748daff24099c"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, @@ -1433,8 +1433,8 @@ pygments = [ {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, ] pymdown-extensions = [ - {file = "pymdown-extensions-8.2.tar.gz", hash = "sha256:b6daa94aad9e1310f9c64c8b1f01e4ce82937ab7eb53bfc92876a97aca02a6f4"}, - {file = "pymdown_extensions-8.2-py3-none-any.whl", hash = "sha256:141452d8ed61165518f2c923454bf054866b85cf466feedb0eb68f04acdc2560"}, + {file = "pymdown-extensions-9.0.tar.gz", hash = "sha256:01e4bec7f4b16beaba0087a74496401cf11afd69e3a11fe95cb593e5c698ef40"}, + {file = "pymdown_extensions-9.0-py3-none-any.whl", hash = "sha256:430cc2fbb30cef2df70edac0b4f62614a6a4d2b06462e32da4ca96098b7c1dfb"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, diff --git a/pyproject.toml b/pyproject.toml index 2f2c2549d9e..ee1abeef683 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ radon = "^5.1.0" xenon = "^0.8.0" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.9.2" -mkdocs-material = "^7.3.0" +mkdocs-material = "^7.3.1" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" mypy = "^0.910" From dec3c883e5aa25e247fb258158f3e6a96e370a67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 09:08:05 +0000 Subject: [PATCH 35/40] chore(deps): bump boto3 from 1.18.51 to 
1.18.54 (#733) Bumps [boto3](https://github.com/boto/boto3) from 1.18.51 to 1.18.54.
Changelog

Sourced from boto3's changelog.

1.18.54

  • api-change:codebuild: [botocore] CodeBuild now allows you to select how batch build statuses are sent to the source provider for a project.
  • api-change:efs: [botocore] Update efs client to latest version
  • api-change:kms: [botocore] Added SDK examples for ConnectCustomKeyStore, CreateCustomKeyStore, CreateKey, DeleteCustomKeyStore, DescribeCustomKeyStores, DisconnectCustomKeyStore, GenerateDataKeyPair, GenerateDataKeyPairWithoutPlaintext, GetPublicKey, ReplicateKey, Sign, UpdateCustomKeyStore and Verify APIs

1.18.53

  • api-change:synthetics: [botocore] CloudWatch Synthetics now enables customers to choose a customer managed AWS KMS key or an Amazon S3-managed key instead of an AWS managed key (default) for the encryption of artifacts that the canary stores in Amazon S3. CloudWatch Synthetics now also supports updating the artifact S3 location.
  • api-change:ssm: [botocore] When "AutoApprovable" is true for a Change Template, then specifying --auto-approve (boolean) in Start-Change-Request-Execution will create a change request that bypasses approver review. (except for change calendar restrictions)
  • api-change:apprunner: [botocore] This release contains several minor bug fixes.

1.18.52

  • api-change:network-firewall: [botocore] This release adds support for strict ordering for stateful rule groups. Using strict ordering, stateful rules are evaluated in the exact order in which you provide them.
  • api-change:dataexchange: [botocore] This release enables subscribers to set up automatic exports of newly published revisions using the new EventAction API.
  • api-change:workmail: [botocore] This release adds support for mobile device access overrides management in Amazon WorkMail.
  • api-change:account: [botocore] This release of the Account Management API enables customers to manage the alternate contacts for their AWS accounts. For more information, see https://docs.aws.amazon.com/accounts/latest/reference/accounts-welcome.html
  • api-change:workspaces: [botocore] Added CreateUpdatedWorkspaceImage API to update WorkSpace images with latest software and drivers. Updated DescribeWorkspaceImages API to display if there are updates available for WorkSpace images.
  • api-change:cloudcontrol: [botocore] Initial release of the SDK for AWS Cloud Control API
  • api-change:macie2: [botocore] Amazon S3 bucket metadata now indicates whether an error or a bucket's permissions settings prevented Amazon Macie from retrieving data about the bucket or the bucket's objects.

--- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index e5fe82ab49b..d8524392f55 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,14 +81,14 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.18.51" +version = "1.18.54" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.21.51,<1.22.0" +botocore = ">=1.21.54,<1.22.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -97,7 +97,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.21.51" +version = "1.21.54" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -1074,12 +1074,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.18.51-py3-none-any.whl", hash = "sha256:83d6f539e0f0e0f0c532bb2b11d1e9c5055d1d806d64a61aff4f49399c294ee7"}, - {file = "boto3-1.18.51.tar.gz", hash = "sha256:98279095b1d08ee6d8d587f2c66fda6d560ad3046e98cd140c1aa8e1ed018c70"}, + {file = "boto3-1.18.54-py3-none-any.whl", hash = "sha256:f22a77098cd70ddf848df6981ec57b92178e9d8eb74637edbdf4173bfa9279fa"}, + {file = "boto3-1.18.54.tar.gz", hash = "sha256:2d81dc484020059fc738165984304107d4db1c6774b6310d08c892a1751f6980"}, ] botocore = [ - {file = "botocore-1.21.51-py3-none-any.whl", hash = "sha256:2089f9fa36a59d8c02435c49d58ccc7b3ceb9c0c054ea4f71631c3c3a1c5245e"}, - {file = "botocore-1.21.51.tar.gz", hash = "sha256:17a10dd33334e7e3aaa4e12f66317284f96bb53267e20bc877a187c442681772"}, + {file = "botocore-1.21.54-py3-none-any.whl", hash = "sha256:56b74a5186bec835baf580b2d062ea1738d3ff5a573653d41b3ad1598a5b77c4"}, + {file = "botocore-1.21.54.tar.gz", hash = "sha256:46127b3a385d0ec73d1994b8958b23b79e0613e12c486371a100df992b72a1b9"}, ] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, From af36fb52818bcb424b747e1bb9c97026a89d65f9 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 5 Oct 2021 13:12:34 +0200 Subject: [PATCH 36/40] feat: expose jmespath powertools functions (#736) --- aws_lambda_powertools/logging/logger.py | 2 +- .../utilities/feature_flags/appconfig.py | 2 +- .../utilities/idempotency/persistence/base.py | 2 +- .../jmespath_utils/__init__.py} | 24 +++- .../utilities/jmespath_utils/envelopes.py | 8 ++ .../utilities/validation/validator.py | 3 +- docs/utilities/idempotency.md | 2 +- docs/utilities/jmespath_functions.md | 131 +++++++++++++++--- .../feature_flags/test_feature_flags.py | 94 ++++++++++--- tests/functional/idempotency/conftest.py | 2 +- 10 files changed, 229 insertions(+), 41 deletions(-) rename aws_lambda_powertools/{shared/jmespath_utils.py => utilities/jmespath_utils/__init__.py} (66%) create mode 100644 aws_lambda_powertools/utilities/jmespath_utils/envelopes.py diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index e8b67a2ca7e..0b9b52f8824 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -446,7 +446,7 @@ def set_package_logger( ------- **Enables debug logging for AWS Lambda Powertools package** - >>> from aws_lambda_powertools.logging.logger import set_package_logger + >>> aws_lambda_powertools.logging.logger import set_package_logger >>> set_package_logger() Parameters diff 
--git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py index ff688dc6be5..dd581df9e22 100644 --- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py +++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py @@ -4,10 +4,10 @@ from botocore.config import Config +from aws_lambda_powertools.utilities import jmespath_utils from aws_lambda_powertools.utilities.parameters import AppConfigProvider, GetParameterError, TransformParameterError from ... import Logger -from ...shared import jmespath_utils from .base import StoreProvider from .exceptions import ConfigurationStoreError, StoreClientError diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 4901e9f9f75..907af8edaa7 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -16,7 +16,6 @@ from aws_lambda_powertools.shared import constants from aws_lambda_powertools.shared.cache_dict import LRUDict -from aws_lambda_powertools.shared.jmespath_utils import PowertoolsFunctions from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.idempotency.config import IdempotencyConfig from aws_lambda_powertools.utilities.idempotency.exceptions import ( @@ -25,6 +24,7 @@ IdempotencyKeyError, IdempotencyValidationError, ) +from aws_lambda_powertools.utilities.jmespath_utils import PowertoolsFunctions logger = logging.getLogger(__name__) diff --git a/aws_lambda_powertools/shared/jmespath_utils.py b/aws_lambda_powertools/utilities/jmespath_utils/__init__.py similarity index 66% rename from aws_lambda_powertools/shared/jmespath_utils.py rename to aws_lambda_powertools/utilities/jmespath_utils/__init__.py index bbb3b38fe04..a8d210bc1e0 100644 --- a/aws_lambda_powertools/shared/jmespath_utils.py +++ b/aws_lambda_powertools/utilities/jmespath_utils/__init__.py @@ -30,8 +30,27 @@ def _func_powertools_base64_gzip(self, value): return uncompressed.decode() -def extract_data_from_envelope(data: Union[Dict, str], envelope: str, jmespath_options: Optional[Dict]) -> Any: - """Searches data using JMESPath expression +def extract_data_from_envelope(data: Union[Dict, str], envelope: str, jmespath_options: Optional[Dict] = None) -> Any: + """Searches and extracts data using JMESPath + + Envelope being the JMESPath expression to extract the data you're after + + Built-in JMESPath functions include: powertools_json, powertools_base64, powertools_base64_gzip + + Examples + -------- + + **Deserialize JSON string and extracts data from body key** + + from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope + from aws_lambda_powertools.utilities.typing import LambdaContext + + + def handler(event: dict, context: LambdaContext): + # event = {"body": "{\"customerId\":\"dd4649e6-2484-4993-acb8-0f9123103394\"}"} # noqa: E800 + payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)") + customer = payload.get("customerId") # now deserialized + ... 
Parameters ---------- @@ -42,6 +61,7 @@ def extract_data_from_envelope(data: Union[Dict, str], envelope: str, jmespath_o jmespath_options : Dict Alternative JMESPath options to be included when filtering expr + Returns ------- Any diff --git a/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py b/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py new file mode 100644 index 00000000000..df50e5f98d4 --- /dev/null +++ b/aws_lambda_powertools/utilities/jmespath_utils/envelopes.py @@ -0,0 +1,8 @@ +API_GATEWAY_REST = "powertools_json(body)" +API_GATEWAY_HTTP = API_GATEWAY_REST +SQS = "Records[*].powertools_json(body)" +SNS = "Records[0].Sns.Message | powertools_json(@)" +EVENTBRIDGE = "detail" +CLOUDWATCH_EVENTS_SCHEDULED = EVENTBRIDGE +KINESIS_DATA_STREAM = "Records[*].kinesis.powertools_json(powertools_base64(data))" +CLOUDWATCH_LOGS = "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]" diff --git a/aws_lambda_powertools/utilities/validation/validator.py b/aws_lambda_powertools/utilities/validation/validator.py index d9ce35fe41b..aab383eeb45 100644 --- a/aws_lambda_powertools/utilities/validation/validator.py +++ b/aws_lambda_powertools/utilities/validation/validator.py @@ -1,8 +1,9 @@ import logging from typing import Any, Callable, Dict, Optional, Union +from aws_lambda_powertools.utilities import jmespath_utils + from ...middleware_factory import lambda_handler_decorator -from ...shared import jmespath_utils from .base import validate_data_against_schema logger = logging.getLogger(__name__) diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index bf06e3292b7..43eb1ac3a0b 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -209,7 +209,7 @@ Imagine the function executes successfully, but the client never receives the re !!! warning "Idempotency for JSON payloads" The payload extracted by the `event_key_jmespath` is treated as a string by default, so will be sensitive to differences in whitespace even when the JSON payload itself is identical. - To alter this behaviour, we can use the [JMESPath built-in function](/utilities/jmespath_functions) *powertools_json()* to treat the payload as a JSON object rather than a string. + To alter this behaviour, we can use the [JMESPath built-in function](jmespath_functions.md#powertools_json-function) `powertools_json()` to treat the payload as a JSON object rather than a string. === "payment.py" diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index 7ef6b2b32b2..e11452cd5cc 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -3,9 +3,106 @@ title: JMESPath Functions description: Utility --- -You might have events or responses that contain non-encoded JSON, where you need to decode so that you can access portions of the object or ensure the Powertools utility receives a JSON object. This is a common use case when using the [validation](/utilities/validation) or [idempotency](/utilities/idempotency) utilities. +!!! tip "JMESPath is a query language for JSON used by AWS CLI, AWS Python SDK, and AWS Lambda Powertools for Python." -## Built-in JMESPath functions +Built-in [JMESPath](https://jmespath.org/){target="_blank"} Functions to easily deserialize common encoded JSON payloads in Lambda functions. 
+ +## Key features + +* Deserialize JSON from JSON strings, base64, and compressed data +* Use JMESPath to extract and combine data recursively + +## Getting started + +You might have events that contains encoded JSON payloads as string, base64, or even in compressed format. It is a common use case to decode and extract them partially or fully as part of your Lambda function invocation. + +Lambda Powertools also have utilities like [validation](validation.md), [idempotency](idempotency.md), or [feature flags](feature_flags.md) where you might need to extract a portion of your data before using them. + +### Extracting data + +You can use the `extract_data_from_envelope` function along with any [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank"}. + +=== "app.py" + + ```python hl_lines="1 7" + from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope + + from aws_lambda_powertools.utilities.typing import LambdaContext + + + def handler(event: dict, context: LambdaContext): + payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)") + customer = payload.get("customerId") # now deserialized + ... + ``` + +=== "event.json" + + ```json + { + "body": "{\"customerId\":\"dd4649e6-2484-4993-acb8-0f9123103394\"}" + } + ``` + +### Built-in envelopes + +We provide built-in envelopes for popular JMESPath expressions used when looking to decode/deserialize JSON objects within AWS Lambda Event Sources. + +=== "app.py" + + ```python hl_lines="1 7" + from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope, envelopes + + from aws_lambda_powertools.utilities.typing import LambdaContext + + + def handler(event: dict, context: LambdaContext): + payload = extract_data_from_envelope(data=event, envelope=envelopes.SNS) + customer = payload.get("customerId") # now deserialized + ... 
+ ``` + +=== "event.json" + + ```json hl_lines="6" + { + "Records": [ + { + "messageId": "19dd0b57-b21e-4ac1-bd88-01bbb068cb78", + "receiptHandle": "MessageReceiptHandle", + "body": "{\"customerId\":\"dd4649e6-2484-4993-acb8-0f9123103394\",\"booking\":{\"id\":\"5b2c4803-330b-42b7-811a-c68689425de1\",\"reference\":\"ySz7oA\",\"outboundFlightId\":\"20c0d2f2-56a3-4068-bf20-ff7703db552d\"},\"payment\":{\"receipt\":\"https:\/\/pay.stripe.com\/receipts\/acct_1Dvn7pF4aIiftV70\/ch_3JTC14F4aIiftV700iFq2CHB\/rcpt_K7QsrFln9FgFnzUuBIiNdkkRYGxUL0X\",\"amount\":100}}", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1523232000000", + "SenderId": "123456789012", + "ApproximateFirstReceiveTimestamp": "1523232000001" + }, + "messageAttributes": {}, + "md5OfBody": "7b270e59b47ff90a553787216d55d91d", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:MyQueue", + "awsRegion": "us-east-1" + } + ] + } + ``` + +These are all built-in envelopes you can use along with their expression as a reference: + +Envelope | JMESPath expression +------------------------------------------------- | --------------------------------------------------------------------------------- +**`API_GATEWAY_REST`** | `powertools_json(body)` +**`API_GATEWAY_HTTP`** | `API_GATEWAY_REST` +**`SQS`** | `Records[*].powertools_json(body)` +**`SNS`** | `Records[0].Sns.Message | powertools_json(@)` +**`EVENTBRIDGE`** | `detail` +**`CLOUDWATCH_EVENTS_SCHEDULED`** | `EVENTBRIDGE` +**`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` +**`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` + +## Advanced + +### Built-in JMESPath functions You can use our built-in JMESPath functions within your expressions to do exactly that to decode JSON Strings, base64, and uncompress gzip data. !!! info @@ -134,33 +231,35 @@ This sample will decompress and decode base64 data, then use JMESPath pipeline e !!! warning This should only be used for advanced use cases where you have special formats not covered by the built-in functions. - This will **replace all provided built-in functions such as `powertools_json`, so you will no longer be able to use them**. - For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. 
-=== "custom_jmespath_function.py" +In order to keep the built-in functions from Powertools, you can subclass from `PowertoolsFunctions`: - ```python hl_lines="2 6-10 14" - from aws_lambda_powertools.utilities.validation import validator - from jmespath import functions +=== "custom_jmespath_function.py" - import schemas + ```python hl_lines="2-3 6-9 11 17" + from aws_lambda_powertools.utilities.jmespath_utils import ( + PowertoolsFunctions, extract_data_from_envelope) + from jmespath.functions import signature - class CustomFunctions(functions.Functions): - @functions.signature({'types': ['string']}) + class CustomFunctions(PowertoolsFunctions): + @signature({'types': ['string']}) # Only decode if value is a string def _func_special_decoder(self, s): return my_custom_decoder_logic(s) custom_jmespath_options = {"custom_functions": CustomFunctions()} - @validator(schema=schemas.INPUT, jmespath_options=**custom_jmespath_options) def handler(event, context): - return event + # use the custom name after `_func_` + extract_data_from_envelope(data=event, + envelope="special_decoder(body)", + jmespath_options=**custom_jmespath_options) + ... ``` -=== "schemas.py" +=== "event.json" - ```python hl_lines="7 14 16 23 39 45 47 52" - --8<-- "docs/shared/validation_basic_jsonschema.py" + ```json + {"body": "custom_encoded_data"} ``` diff --git a/tests/functional/feature_flags/test_feature_flags.py b/tests/functional/feature_flags/test_feature_flags.py index c421cc85423..8381dc6bf1d 100644 --- a/tests/functional/feature_flags/test_feature_flags.py +++ b/tests/functional/feature_flags/test_feature_flags.py @@ -802,7 +802,8 @@ def test_get_configuration_with_envelope_and_raw(mocker, config): assert "log_level" in config assert "log_level" not in features_config - + + ## ## Inequality test cases ## @@ -828,9 +829,12 @@ def test_flags_not_equal_no_match(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", context={"tenant_id": "345345435", "username": "a"}, default=False + ) assert toggle == expected_value + def test_flags_not_equal_match(mocker, config): expected_value = True mocked_app_config_schema = { @@ -876,9 +880,14 @@ def test_flags_less_than_no_match_1(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, + default=False, + ) assert toggle == expected_value + def test_flags_less_than_no_match_2(mocker, config): expected_value = False mocked_app_config_schema = { @@ -899,9 +908,14 @@ def test_flags_less_than_no_match_2(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, + default=False, + ) assert toggle == expected_value + def test_flags_less_than_match(mocker, config): expected_value = True mocked_app_config_schema = { 
@@ -922,10 +936,15 @@ def test_flags_less_than_match(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, + default=False, + ) assert toggle == expected_value -# Test less than or equal to + +# Test less than or equal to def test_flags_less_than_or_equal_no_match(mocker, config): expected_value = False mocked_app_config_schema = { @@ -946,9 +965,14 @@ def test_flags_less_than_or_equal_no_match(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, + default=False, + ) assert toggle == expected_value + def test_flags_less_than_or_equal_match_1(mocker, config): expected_value = True mocked_app_config_schema = { @@ -969,7 +993,11 @@ def test_flags_less_than_or_equal_match_1(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, + default=False, + ) assert toggle == expected_value @@ -993,9 +1021,14 @@ def test_flags_less_than_or_equal_match_2(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, + default=False, + ) assert toggle == expected_value + # Test greater than def test_flags_greater_than_no_match_1(mocker, config): expected_value = False @@ -1017,9 +1050,14 @@ def test_flags_greater_than_no_match_1(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, + default=False, + ) assert toggle == expected_value + def test_flags_greater_than_no_match_2(mocker, config): expected_value = False mocked_app_config_schema = { @@ -1040,9 +1078,14 @@ def test_flags_greater_than_no_match_2(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, + default=False, + ) assert toggle == expected_value + def 
test_flags_greater_than_match(mocker, config): expected_value = True mocked_app_config_schema = { @@ -1063,10 +1106,15 @@ def test_flags_greater_than_match(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, + default=False, + ) assert toggle == expected_value -# Test greater than or equal to + +# Test greater than or equal to def test_flags_greater_than_or_equal_no_match(mocker, config): expected_value = False mocked_app_config_schema = { @@ -1087,9 +1135,14 @@ def test_flags_greater_than_or_equal_no_match(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.04.01"}, + default=False, + ) assert toggle == expected_value + def test_flags_greater_than_or_equal_match_1(mocker, config): expected_value = True mocked_app_config_schema = { @@ -1110,7 +1163,11 @@ def test_flags_greater_than_or_equal_match_1(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.12.25"}, + default=False, + ) assert toggle == expected_value @@ -1134,6 +1191,9 @@ def test_flags_greater_than_or_equal_match_2(mocker, config): } } feature_flags = init_feature_flags(mocker, mocked_app_config_schema, config) - toggle = feature_flags.evaluate(name="my_feature", context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, default=False) + toggle = feature_flags.evaluate( + name="my_feature", + context={"tenant_id": "345345435", "username": "a", "current_date": "2021.10.31"}, + default=False, + ) assert toggle == expected_value - diff --git a/tests/functional/idempotency/conftest.py b/tests/functional/idempotency/conftest.py index f563b4bbcda..71b5978497c 100644 --- a/tests/functional/idempotency/conftest.py +++ b/tests/functional/idempotency/conftest.py @@ -11,10 +11,10 @@ from botocore.config import Config from jmespath import functions -from aws_lambda_powertools.shared.jmespath_utils import extract_data_from_envelope from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer from aws_lambda_powertools.utilities.idempotency.idempotency import IdempotencyConfig +from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope from aws_lambda_powertools.utilities.validation import envelopes from tests.functional.utils import load_event From 465b6cb551917ef91c80a8ba4e31b6fd77e0c8fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 14:46:33 +0200 Subject: [PATCH 37/40] chore(deps-dev): bump coverage from 5.5 to 6.0 (#732) Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 109 +++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 53 insertions(+), 58 deletions(-) diff --git a/poetry.lock b/poetry.lock index d8524392f55..33a00204a8b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -149,17 +149,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "5.5" +version = "6.0" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.6" [package.dependencies] -toml = {version = "*", optional = true, markers = "extra == \"toml\""} +tomli = {version = "*", optional = true, markers = "extra == \"toml\""} [package.extras] -toml = ["toml"] +toml = ["tomli"] [[package]] name = "dataclasses" @@ -968,6 +968,14 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "1.2.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "typed-ast" version = "1.4.3" @@ -1047,7 +1055,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "051e2505e2df84e7715e8c160fac3fb984a1e274ca2cbff43fc36962538c2e43" +content-hash = "954bc39735889712f4db3b3ccda42ae39ed25e75cd459ddb6ae634b54bede0a1" [metadata.files] appdirs = [ @@ -1098,58 +1106,41 @@ colorama = [ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = 
"coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, + {file = "coverage-6.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:3dfb23cc180b674a11a559183dff9655beb9da03088f3fe3c4f3a6d200c86f05"}, + {file = "coverage-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5dd5ae0a9cd55d71f1335c331e9625382239b8cede818fb62d8d2702336dbf8"}, + {file = "coverage-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8426fec5ad5a6e8217921716b504e9b6e1166dc147e8443b4855e329db686282"}, + {file = "coverage-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aa5d4d43fa18cc9d0c6e02a83de0b9729b5451a9066574bd276481474f0a53ab"}, + {file = "coverage-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78dd3eeb8f5ff26d2113c41836bac04a9ea91be54c346826b54a373133c8c53"}, + {file = "coverage-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:581fddd2f883379bd5af51da9233e0396b6519f3d3eeae4fb88867473be6d56e"}, + {file = "coverage-6.0-cp310-cp310-win32.whl", hash = "sha256:43bada49697a62ffa0283c7f01bbc76aac562c37d4bb6c45d56dd008d841194e"}, + {file = "coverage-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa816e97cfe1f691423078dffa39a18106c176f28008db017b3ce3e947c34aa5"}, + {file = "coverage-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5c191e01b23e760338f19d8ba2470c0dad44c8b45e41ac043b2db84efc62f695"}, + {file = "coverage-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274a612f67f931307706b60700f1e4cf80e1d79dff6c282fc9301e4565e78724"}, + {file = "coverage-6.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:a9dbfcbc56d8de5580483cf2caff6a59c64d3e88836cbe5fb5c20c05c29a8808"}, + {file = "coverage-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e63490e8a6675cee7a71393ee074586f7eeaf0e9341afd006c5d6f7eec7c16d7"}, + {file = "coverage-6.0-cp36-cp36m-win32.whl", hash = "sha256:72f8c99f1527c5a8ee77c890ea810e26b39fd0b4c2dffc062e20a05b2cca60ef"}, + {file = "coverage-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:88f1810eb942e7063d051d87aaaa113eb5fd5a7fd2cda03a972de57695b8bb1a"}, + {file = "coverage-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:befb5ffa9faabef6dadc42622c73de168001425258f0b7e402a2934574e7a04b"}, + {file = "coverage-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dbda34e8e26bd86606ba8a9c13ccb114802e01758a3d0a75652ffc59a573220"}, + {file = "coverage-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b4ee5815c776dfa3958ba71c7cd4cdd8eb40d79358a18352feb19562fe4408c4"}, + {file = "coverage-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d82cbef1220703ce56822be7fbddb40736fc1a928ac893472df8aff7421ae0aa"}, + {file = "coverage-6.0-cp37-cp37m-win32.whl", hash = "sha256:d795a2c92fe8cb31f6e9cd627ee4f39b64eb66bf47d89d8fcf7cb3d17031c887"}, + {file = "coverage-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6e216e4021c934246c308fd3e0d739d9fa8a3f4ea414f584ab90ef9c1592f282"}, + {file = "coverage-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8305e14112efb74d0b5fec4df6e41cafde615c2392a7e51c84013cafe945842c"}, + {file = "coverage-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4865dc4a7a566147cbdc2b2f033a6cccc99a7dcc89995137765c384f6c73110b"}, + {file = "coverage-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:25df2bc53a954ba2ccf230fa274d1de341f6aa633d857d75e5731365f7181749"}, + {file = "coverage-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08fd55d2e00dac4c18a2fa26281076035ec86e764acdc198b9185ce749ada58f"}, + {file = "coverage-6.0-cp38-cp38-win32.whl", hash = "sha256:11ce082eb0f7c2bbfe96f6c8bcc3a339daac57de4dc0f3186069ec5c58da911c"}, + {file = "coverage-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7844a8c6a0fee401edbf578713c2473e020759267c40261b294036f9d3eb6a2d"}, + {file = "coverage-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bea681309bdd88dd1283a8ba834632c43da376d9bce05820826090aad80c0126"}, + {file = "coverage-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e735ab8547d8a1fe8e58dd765d6f27ac539b395f52160d767b7189f379f9be7a"}, + {file = "coverage-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7593a49300489d064ebb6c58539f52cbbc4a2e6a4385de5e92cae1563f88a425"}, + {file = "coverage-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adb0f4c3c8ba8104378518a1954cbf3d891a22c13fd0e0bf135391835f44f288"}, + {file = "coverage-6.0-cp39-cp39-win32.whl", hash = "sha256:8da0c4a26a831b392deaba5fdd0cd7838d173b47ce2ec3d0f37be630cb09ef6e"}, + {file = "coverage-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:7af2f8e7bb54ace984de790e897f858e88068d8fbc46c9490b7c19c59cf51822"}, + {file = "coverage-6.0-pp36-none-any.whl", hash = 
"sha256:82b58d37c47d93a171be9b5744bcc96a0012cbf53d5622b29a49e6be2097edd7"}, + {file = "coverage-6.0-pp37-none-any.whl", hash = "sha256:fff04bfefb879edcf616f1ce5ea6f4a693b5976bdc5e163f8464f349c25b59f0"}, + {file = "coverage-6.0.tar.gz", hash = "sha256:17983f6ccc47f4864fd16d20ff677782b23d1207bf222d10e4d676e4636b0872"}, ] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, @@ -1607,6 +1598,10 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, + {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, +] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, diff --git a/pyproject.toml b/pyproject.toml index ee1abeef683..8841b5c3dd8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ pydantic = {version = "^1.8.2", optional = true } email-validator = {version = "*", optional = true } [tool.poetry.dev-dependencies] -coverage = {extras = ["toml"], version = "^5.5"} +coverage = {extras = ["toml"], version = "^6.0"} pytest = "^6.2.5" black = "^20.8b1" flake8 = "^3.9.0" From 5641914cd2c472b561757bb072d5dc23e3d3b1bd Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 5 Oct 2021 14:49:02 +0200 Subject: [PATCH 38/40] docs(jmespath): clarify envelope terminology --- docs/utilities/jmespath_functions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index e11452cd5cc..583357a55e2 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -18,6 +18,8 @@ You might have events that contains encoded JSON payloads as string, base64, or Lambda Powertools also have utilities like [validation](validation.md), [idempotency](idempotency.md), or [feature flags](feature_flags.md) where you might need to extract a portion of your data before using them. +!!! info "**Envelope** is the terminology we use for the JMESPath expression to extract your JSON object from your data input" + ### Extracting data You can use the `extract_data_from_envelope` function along with any [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank"}. From 97c94235fe49c586df458440b48550605c8ac3a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 12:50:41 +0000 Subject: [PATCH 39/40] chore(deps-dev): bump pytest-cov from 2.12.1 to 3.0.0 (#730) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 2.12.1 to 3.0.0.
Changelog

Sourced from pytest-cov's changelog.

3.0.0 (2021-10-04)

Note that this release drops support for Python 2.7 and Python 3.5.

  • Added support for Python 3.10 and updated various test dependencies. Contributed by Hugo van Kemenade in [#500](https://github.com/pytest-dev/pytest-cov/issues/500) <https://github.com/pytest-dev/pytest-cov/pull/500>_.
  • Switched from Travis CI to GitHub Actions. Contributed by Hugo van Kemenade in [#494](https://github.com/pytest-dev/pytest-cov/issues/494) <https://github.com/pytest-dev/pytest-cov/pull/494>_ and [#495](https://github.com/pytest-dev/pytest-cov/issues/495) <https://github.com/pytest-dev/pytest-cov/pull/495>_.
  • Add a --cov-reset CLI option. Contributed by Danilo Šegan in [#459](https://github.com/pytest-dev/pytest-cov/issues/459) <https://github.com/pytest-dev/pytest-cov/pull/459>_.
  • Improved validation of --cov-fail-under CLI option. Contributed by ... Ronny Pfannschmidt's desire for skark in [#480](https://github.com/pytest-dev/pytest-cov/issues/480) <https://github.com/pytest-dev/pytest-cov/pull/480>_.
  • Dropped Python 2.7 support. Contributed by Thomas Grainger in [#488](https://github.com/pytest-dev/pytest-cov/issues/488) <https://github.com/pytest-dev/pytest-cov/pull/488>_.
  • Updated trove classifiers. Contributed by Michał Bielawski in [#481](https://github.com/pytest-dev/pytest-cov/issues/481) <https://github.com/pytest-dev/pytest-cov/pull/481>_.

2.13.0 (2021-06-01)

  • Changed the toml requirement to always be directly required (instead of being required through a coverage extra). This fixes issues with pip-compile (pip-tools#1300 <https://github.com/jazzband/pip-tools/issues/1300>). Contributed by Sorin Sbarnea in [#472](https://github.com/pytest-dev/pytest-cov/issues/472) <https://github.com/pytest-dev/pytest-cov/pull/472>_.
  • Documented show_contexts. Contributed by Brian Rutledge in [#473](https://github.com/pytest-dev/pytest-cov/issues/473) <https://github.com/pytest-dev/pytest-cov/pull/473>_.
Commits
  • 560b955 Bump version: 2.12.1 → 3.0.0
  • e988a6c Update changelog.
  • f015932 Merge pull request #500 from hugovk/add-3.10
  • 60a3cc1 No need to build universal wheels for Python 3-only
  • 0bc997a Add support for Python 3.10
  • 679935b Merge pull request #494 from hugovk/test-on-github-actions
  • 96f9aad Add 'all good' job to be added as a required build
  • 6395ece Test conditional collection on PyPy and CPython
  • f4a88d6 Test both PyPy3.6 and PyPy3.7
  • a948e89 Test both PyPy3.6 and PyPy3.7
  • Additional commits viewable in compare view

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
--- poetry.lock | 13 ++++++------- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 33a00204a8b..8e6806f9848 100644 --- a/poetry.lock +++ b/poetry.lock @@ -785,16 +785,15 @@ testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.12.1" +version = "3.0.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] -coverage = ">=5.2.1" +coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" -toml = "*" [package.extras] testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] @@ -1055,7 +1054,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "954bc39735889712f4db3b3ccda42ae39ed25e75cd459ddb6ae634b54bede0a1" +content-hash = "a0358de807bcf4fe1af43ac28f40f41552d559b11cfc5ebd099a4e3842f87a8d" [metadata.files] appdirs = [ @@ -1440,8 +1439,8 @@ pytest-asyncio = [ {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] pytest-cov = [ - {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, - {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] pytest-mock = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, diff --git a/pyproject.toml b/pyproject.toml index 8841b5c3dd8..ca188d52f84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ flake8-fixme = "^1.1.1" flake8-isort = "^4.0.0" flake8-variables-names = "^0.0.4" isort = "^5.9.3" -pytest-cov = "^2.12.1" +pytest-cov = "^3.0.0" pytest-mock = "^3.5.1" pdoc3 = "^0.10.0" pytest-asyncio = "^0.15.1" From 241b45ad353d745ef4cc7015bfecb023157daf13 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 5 Oct 2021 16:33:51 +0200 Subject: [PATCH 40/40] chore: bump to 1.21.0 --- CHANGELOG.md | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 2 +- 2 files changed, 65 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f8993c40c73..70b8d32c004 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,70 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo ## [Unreleased] +## 1.21.0 - 2021-10-05 + +### Bug Fixes + +* **data-classes:** use correct asdict funciton ([#666](https://github.com/awslabs/aws-lambda-powertools-python/issues/666)) +* **feature-flags:** rules should evaluate with an AND op ([#724](https://github.com/awslabs/aws-lambda-powertools-python/issues/724)) +* **idempotency:** sorting keys before hashing ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) +* **idempotency:** sorting keys before hashing +* **logger:** push extra keys to the end ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) +* **mypy:** a few return types, type signatures, and untyped areas 
([#718](https://github.com/awslabs/aws-lambda-powertools-python/issues/718)) + +### Code Refactoring + +* **data-classes:** clean up internal logic for APIGatewayAuthorizerResponse ([#643](https://github.com/awslabs/aws-lambda-powertools-python/issues/643)) + +### Documentation + +* Terraform reference for SAR Lambda Layer ([#716](https://github.com/awslabs/aws-lambda-powertools-python/issues/716)) +* **event-handler:** document catch-all routes ([#705](https://github.com/awslabs/aws-lambda-powertools-python/issues/705)) +* **idempotency:** fix misleading idempotent examples ([#661](https://github.com/awslabs/aws-lambda-powertools-python/issues/661)) +* **jmespath:** clarify envelope terminology +* **parser:** fix incorrect import in root_validator example ([#735](https://github.com/awslabs/aws-lambda-powertools-python/issues/735)) + +### Features + +* expose jmespath powertools functions ([#736](https://github.com/awslabs/aws-lambda-powertools-python/issues/736)) +* boto3 sessions in batch, parameters & idempotency ([#717](https://github.com/awslabs/aws-lambda-powertools-python/issues/717)) +* **feature-flags**: add get_raw_configuration property in store; expose store ([#720](https://github.com/awslabs/aws-lambda-powertools-python/issues/720)) +* **feature-flags:** Bring your own logger for debug ([#709](https://github.com/awslabs/aws-lambda-powertools-python/issues/709)) +* **feature-flags:** improve "IN/NOT_IN"; new rule actions ([#710](https://github.com/awslabs/aws-lambda-powertools-python/issues/710)) +* **feature-flags:** get_raw_configuration property in Store ([#720](https://github.com/awslabs/aws-lambda-powertools-python/issues/720)) +* **feature_flags:** Added inequality conditions ([#721](https://github.com/awslabs/aws-lambda-powertools-python/issues/721)) +* **idempotency:** makes customers unit testing easier ([#719](https://github.com/awslabs/aws-lambda-powertools-python/issues/719)) +* **validator:** include missing data elements from a validation error ([#686](https://github.com/awslabs/aws-lambda-powertools-python/issues/686)) + +### Maintenance + +* add python 3.9 support +* **deps:** bump boto3 from 1.18.51 to 1.18.54 ([#733](https://github.com/awslabs/aws-lambda-powertools-python/issues/733)) +* **deps:** bump boto3 from 1.18.32 to 1.18.38 ([#671](https://github.com/awslabs/aws-lambda-powertools-python/issues/671)) +* **deps:** bump boto3 from 1.18.38 to 1.18.41 ([#677](https://github.com/awslabs/aws-lambda-powertools-python/issues/677)) +* **deps:** bump boto3 from 1.18.49 to 1.18.51 ([#713](https://github.com/awslabs/aws-lambda-powertools-python/issues/713)) +* **deps:** bump boto3 from 1.18.41 to 1.18.49 ([#703](https://github.com/awslabs/aws-lambda-powertools-python/issues/703)) +* **deps:** bump codecov/codecov-action from 2.0.2 to 2.1.0 ([#675](https://github.com/awslabs/aws-lambda-powertools-python/issues/675)) +* **deps-dev:** bump coverage from 5.5 to 6.0 ([#732](https://github.com/awslabs/aws-lambda-powertools-python/issues/732)) +* **deps-dev:** bump mkdocs-material from 7.2.8 to 7.3.0 ([#695](https://github.com/awslabs/aws-lambda-powertools-python/issues/695)) +* **deps-dev:** bump mkdocs-material from 7.2.6 to 7.2.8 ([#682](https://github.com/awslabs/aws-lambda-powertools-python/issues/682)) +* **deps-dev:** bump flake8-bugbear from 21.4.3 to 21.9.1 ([#676](https://github.com/awslabs/aws-lambda-powertools-python/issues/676)) +* **deps-dev:** bump flake8-bugbear from 21.9.1 to 21.9.2 
([#712](https://github.com/awslabs/aws-lambda-powertools-python/issues/712)) +* **deps-dev:** bump radon from 4.5.2 to 5.1.0 ([#673](https://github.com/awslabs/aws-lambda-powertools-python/issues/673)) +* **deps-dev:** bump mkdocs-material from 7.3.0 to 7.3.1 ([#731](https://github.com/awslabs/aws-lambda-powertools-python/issues/731)) +* **deps-dev:** bump xenon from 0.7.3 to 0.8.0 ([#669](https://github.com/awslabs/aws-lambda-powertools-python/issues/669)) + +### Bug Fixes + +* **event-handler:** fix issue with strip_prefixes and root level resolvers ([#646](https://github.com/awslabs/aws-lambda-powertools-python/issues/646)) + +### Maintenance + +* **deps:** bump boto3 from 1.18.26 to 1.18.32 ([#663](https://github.com/awslabs/aws-lambda-powertools-python/issues/663)) +* **deps-dev:** bump mkdocs-material from 7.2.4 to 7.2.6 ([#665](https://github.com/awslabs/aws-lambda-powertools-python/issues/665)) +* **deps-dev:** bump pytest from 6.2.4 to 6.2.5 ([#662](https://github.com/awslabs/aws-lambda-powertools-python/issues/662)) +* **deps-dev:** bump mike from 0.6.0 to 1.0.1 ([#453](https://github.com/awslabs/aws-lambda-powertools-python/issues/453)) +* **license:** add third party license to pyproject.toml ([#641](https://github.com/awslabs/aws-lambda-powertools-python/issues/641)) ## 1.20.2 - 2021-09-02 ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 8841b5c3dd8..6636ddc0f95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.20.2" +version = "1.21.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
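As a minimal, standalone illustration of the `**idempotency:** sorting keys before hashing` changelog entry above — plain standard library, not Powertools code, with made-up payload values — serializing with `sort_keys=True` makes the hash independent of the order in which callers build their payload:

```python
import hashlib
import json

# Same logical payload, built with keys in a different order
event_a = {"customer_id": "123", "amount": 10}
event_b = {"amount": 10, "customer_id": "123"}

# Without sorting, the serialized text differs, so an idempotency-style hash differs too
assert json.dumps(event_a) != json.dumps(event_b)
assert (
    hashlib.md5(json.dumps(event_a).encode()).hexdigest()
    != hashlib.md5(json.dumps(event_b).encode()).hexdigest()
)

# Sorting keys yields a canonical serialization, so both payloads hash to the same value
assert (
    hashlib.md5(json.dumps(event_a, sort_keys=True).encode()).hexdigest()
    == hashlib.md5(json.dumps(event_b, sort_keys=True).encode()).hexdigest()
)
```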