From c9252c7fdcb93fa6f5f686325dd9ecbc49a1a1de Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 12:13:11 +0200 Subject: [PATCH 01/42] chore: correct typos --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b3416db3583..4620acb8bb6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.0.1] - 2020-07-06 ### Fixed -- **Logger**: Fix a bug with `inject_lambda_context` causing existing an Logger keys to be overriden if `structure_logs` was called before +- **Logger**: Fix a bug with `inject_lambda_context` causing existing an Logger keys to be overridden if `structure_logs` was called before ## [1.0.0] - 2020-06-18 ### Added @@ -102,7 +102,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [0.8.0] - 2020-04-24 ### Added -- **Logger**: Introduced `Logger` class for stuctured logging as a replacement for `logger_setup` +- **Logger**: Introduced `Logger` class for structured logging as a replacement for `logger_setup` - **Logger**: Introduced `Logger.inject_lambda_context` decorator as a replacement for `logger_inject_lambda_context` ### Removed From d4f8a194ec55a4fe7aaca93cba64cbffc9fa509b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 14:08:38 +0200 Subject: [PATCH 02/42] fix: split ColdStart metric to its own EMF blob #125 --- aws_lambda_powertools/metrics/metrics.py | 10 +++--- tests/functional/test_metrics.py | 39 +++++++++++++++++++++++- 2 files changed, 44 insertions(+), 5 deletions(-) diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index fe4fb559d6f..205f30d9545 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -4,7 +4,8 @@ import warnings from typing import Any, Callable -from .base import MetricManager +from .base import 
MetricManager, MetricUnit +from .metric import single_metric logger = logging.getLogger(__name__) @@ -167,6 +168,7 @@ def __add_cold_start_metric(self, context: Any): global is_cold_start if is_cold_start: logger.debug("Adding cold start metric and function_name dimension") - self.add_metric(name="ColdStart", value=1, unit="Count") - self.add_dimension(name="function_name", value=context.function_name) - is_cold_start = False + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric: + metric.add_dimension(name="function_name", value=context.function_name) + metric.add_dimension(name="service", value=self.service) + is_cold_start = False diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index 3407441a7bc..0f948d9109f 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -7,6 +7,7 @@ from aws_lambda_powertools import Metrics, single_metric from aws_lambda_powertools.metrics import MetricUnit, MetricUnitError, MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics import metrics as metrics_global from aws_lambda_powertools.metrics.base import MetricManager @@ -14,6 +15,7 @@ def reset_metric_set(): metrics = Metrics() metrics.clear_metrics() + metrics_global.is_cold_start = True # ensure each test has cold start yield @@ -112,6 +114,10 @@ def capture_metrics_output(capsys): return json.loads(capsys.readouterr().out.strip()) +def capture_metrics_output_multiple_emf_objects(capsys): + return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line] + + def test_single_metric_logs_one_metric_only(capsys, metric, dimension, namespace): # GIVEN we try adding more than one metric # WHEN using single_metric context manager @@ -495,7 +501,7 @@ def lambda_handler(evt, context): LambdaContext = namedtuple("LambdaContext", "function_name") lambda_handler({}, LambdaContext("example_fn")) - _ = 
capture_metrics_output(capsys) # ignore first stdout captured + _, _ = capture_metrics_output_multiple_emf_objects(capsys) # ignore first stdout captured # THEN ColdStart metric and function_name dimension should be logged once lambda_handler({}, LambdaContext("example_fn")) @@ -630,3 +636,34 @@ def test_serialize_metric_set_metric_definition(metric, dimension, namespace, se assert "Timestamp" in metric_definition_output["_aws"] remove_timestamp(metrics=[metric_definition_output, expected_metric_definition]) assert metric_definition_output == expected_metric_definition + + +def test_log_metrics_capture_cold_start_metric_separately(capsys, namespace, service, metric, dimension): + # GIVEN Metrics is initialized + my_metrics = Metrics(service=service, namespace=namespace) + + # WHEN log_metrics is used with capture_cold_start_metric + @my_metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(evt, context): + my_metrics.add_metric(**metric) + my_metrics.add_dimension(**dimension) + + LambdaContext = namedtuple("LambdaContext", "function_name") + lambda_handler({}, LambdaContext("example_fn")) + + cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys) + + # THEN ColdStart metric and function_name dimension should be logged + # in a separate EMF blob than the application metrics + assert cold_start_blob["ColdStart"] == 1 + assert cold_start_blob["function_name"] == "example_fn" + assert cold_start_blob["service"] == service + + # and that application metrics dimensions are not part of ColdStart EMF blob + assert "test_dimension" not in cold_start_blob + + # THEN application metrics EMF blob should not have function_name dimension + assert "function_name" not in custom_metrics_blob + assert custom_metrics_blob["service"] == service + assert custom_metrics_blob["single_metric"] == metric["value"] + assert custom_metrics_blob["test_dimension"] == dimension["value"] From 4cb4982d0324aa46038cfd92836571bf90c50880 Mon Sep 
17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 14:14:17 +0200 Subject: [PATCH 03/42] fix: update cold_start doc to reflect #125 --- docs/content/core/metrics.mdx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/content/core/metrics.mdx b/docs/content/core/metrics.mdx index 7265d9b8a50..696074c6812 100644 --- a/docs/content/core/metrics.mdx +++ b/docs/content/core/metrics.mdx @@ -238,7 +238,12 @@ def lambda_handler(evt, ctx): ... ``` -If it's a cold start, this feature will add a metric named `ColdStart` and a dimension named `function_name`. +If it's a cold start invocation, this feature will: + +* Create a separate EMF blob solely containing a metric named `ColdStart` +* Add `function_name` and `service` dimensions + +This has the advantage of keeping cold start metric separate from your application metrics. ## Testing your code From e6e5b85b37885cb007f6b99302eeaa0d18188e70 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 14:17:13 +0200 Subject: [PATCH 04/42] chore: add metrics fix description --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4620acb8bb6..dcae391835c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Fixed +- **Metrics**: Cold start metric is now completely separate from application metrics dimensions, making it easier and cheaper to visualize + ## [1.1.3] - 2020-08-18 ### Fixed - **Logger**: Logs emitted twice, structured and unstructured, due to Lambda configuring the root handler From ea26fc9e04c0784ef788df60e211b500fd2b3ce1 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sat, 22 Aug 2020 17:17:46 +0200 Subject: [PATCH 05/42] chore: grammar Co-authored-by: Tom McCarthy --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68540f22b46..caafe2f8474 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,7 +47,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.0.1] - 2020-07-06 ### Fixed -- **Logger**: Fix a bug with `inject_lambda_context` causing existing an Logger keys to be overridden if `structure_logs` was called before +- **Logger**: Fix a bug with `inject_lambda_context` causing existing Logger keys to be overridden if `structure_logs` was called before ## [1.0.0] - 2020-06-18 ### Added From 059fec9aab2b9f91a4f809540ccc6f4d85fe50cc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 17:19:51 +0200 Subject: [PATCH 06/42] improv: explicitly assert not having ColdStart metric --- tests/functional/test_metrics.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index 0f948d9109f..4d092131988 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -662,8 +662,12 @@ def lambda_handler(evt, context): # and that application metrics dimensions are not part of ColdStart EMF blob assert "test_dimension" not in cold_start_blob - # THEN application metrics EMF blob should not have function_name dimension + # THEN application metrics EMF blob should not have + # ColdStart metric nor function_name dimension assert "function_name" not in custom_metrics_blob + assert "ColdStart" not in custom_metrics_blob + + # and that application metrics are recorded as normal assert custom_metrics_blob["service"] == service assert custom_metrics_blob["single_metric"] == metric["value"] assert custom_metrics_blob["test_dimension"] == dimension["value"] From 4e7236d0c8726cb05b300c9d22e1eb58889ca936 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:04:26 +0200 Subject: [PATCH 07/42] feat: capture_response as metadata option #127 --- aws_lambda_powertools/tracing/tracer.py | 99 ++++++++++++++++++------- tests/unit/test_tracing.py | 34 +++++++++ 2 files 
changed, 107 insertions(+), 26 deletions(-) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 4c12be3fc26..bac70ddaeb5 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -226,12 +226,19 @@ def patch(self, modules: Tuple[str] = None): else: aws_xray_sdk.core.patch(modules) - def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = None): + def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = None, capture_response: bool = True): """Decorator to create subsegment for lambda handlers As Lambda follows (event, context) signature we can remove some of the boilerplate and also capture any exception any Lambda function throws or its response as metadata + Parameters + ---------- + lambda_handler : Callable + Method to annotate on + capture_response : bool, optional + Instructs tracer to not include handler's response as metadata, by default True + Example ------- **Lambda function using capture_lambda_handler decorator** @@ -241,16 +248,24 @@ def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = No def handler(event, context): ... - Parameters - ---------- - method : Callable - Method to annotate on + **Preventing Tracer to log response as metadata** + + tracer = Tracer(service="payment") + @tracer.capture_lambda_handler(capture_response=False) + def handler(event, context): + ... 
Raises ------ err Exception raised by method """ + # If handler is None we've been called with parameters + # Return a partial function with args filled + if lambda_handler is None: + logger.debug("Decorator called with parameters") + return functools.partial(self.capture_lambda_handler, capture_response=capture_response) + lambda_handler_name = lambda_handler.__name__ @functools.wraps(lambda_handler) @@ -268,7 +283,10 @@ def decorate(event, context): logger.debug("Received lambda handler response successfully") logger.debug(response) self._add_response_as_metadata( - function_name=lambda_handler_name, data=response, subsegment=subsegment + function_name=lambda_handler_name, + data=response, + subsegment=subsegment, + capture_response=capture_response, ) except Exception as err: logger.exception(f"Exception received from {lambda_handler_name}") @@ -281,7 +299,7 @@ def decorate(event, context): return decorate - def capture_method(self, method: Callable = None): + def capture_method(self, method: Callable = None, capture_response: bool = True): """Decorator to create subsegment for arbitrary functions It also captures both response and exceptions as metadata @@ -295,6 +313,13 @@ def capture_method(self, method: Callable = None): `async.gather` is called, or use `in_subsegment_async` context manager via our escape hatch mechanism - See examples. 
+ Parameters + ---------- + method : Callable + Method to annotate on + capture_response : bool, optional + Instructs tracer to not include method's response as metadata, by default True + Example ------- **Custom function using capture_method decorator** @@ -416,29 +441,31 @@ async def async_tasks(): return { "task": "done", **ret } - Parameters - ---------- - method : Callable - Method to annotate on - Raises ------ err Exception raised by method """ + # If method is None we've been called with parameters + # Return a partial function with args filled + if method is None: + logger.debug("Decorator called with parameters") + return functools.partial(self.capture_method, capture_response=capture_response) if inspect.iscoroutinefunction(method): - decorate = self._decorate_async_function(method=method) + decorate = self._decorate_async_function(method=method, capture_response=capture_response) elif inspect.isgeneratorfunction(method): - decorate = self._decorate_generator_function(method=method) + decorate = self._decorate_generator_function(method=method, capture_response=capture_response) elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): - decorate = self._decorate_generator_function_with_context_manager(method=method) + decorate = self._decorate_generator_function_with_context_manager( + method=method, capture_response=capture_response + ) else: - decorate = self._decorate_sync_function(method=method) + decorate = self._decorate_sync_function(method=method, capture_response=capture_response) return decorate - def _decorate_async_function(self, method: Callable = None): + def _decorate_async_function(self, method: Callable = None, capture_response: bool = True): method_name = f"{method.__name__}" @functools.wraps(method) @@ -447,7 +474,12 @@ async def decorate(*args, **kwargs): try: logger.debug(f"Calling method: {method_name}") response = await method(*args, **kwargs) - 
self._add_response_as_metadata(function_name=method_name, data=response, subsegment=subsegment) + self._add_response_as_metadata( + function_name=method_name, + data=response, + subsegment=subsegment, + capture_response=capture_response, + ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) @@ -457,7 +489,7 @@ async def decorate(*args, **kwargs): return decorate - def _decorate_generator_function(self, method: Callable = None): + def _decorate_generator_function(self, method: Callable = None, capture_response: bool = True): method_name = f"{method.__name__}" @functools.wraps(method) @@ -466,7 +498,9 @@ def decorate(*args, **kwargs): try: logger.debug(f"Calling method: {method_name}") result = yield from method(*args, **kwargs) - self._add_response_as_metadata(function_name=method_name, data=result, subsegment=subsegment) + self._add_response_as_metadata( + function_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response + ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) @@ -476,7 +510,7 @@ def decorate(*args, **kwargs): return decorate - def _decorate_generator_function_with_context_manager(self, method: Callable = None): + def _decorate_generator_function_with_context_manager(self, method: Callable = None, capture_response: bool = True): method_name = f"{method.__name__}" @functools.wraps(method) @@ -488,7 +522,9 @@ def decorate(*args, **kwargs): with method(*args, **kwargs) as return_val: result = return_val yield result - self._add_response_as_metadata(function_name=method_name, data=result, subsegment=subsegment) + self._add_response_as_metadata( + function_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response + ) 
except Exception as err: logger.exception(f"Exception received from '{method_name}' method") self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) @@ -496,7 +532,7 @@ def decorate(*args, **kwargs): return decorate - def _decorate_sync_function(self, method: Callable = None): + def _decorate_sync_function(self, method: Callable = None, capture_response: bool = True): method_name = f"{method.__name__}" @functools.wraps(method) @@ -505,7 +541,12 @@ def decorate(*args, **kwargs): try: logger.debug(f"Calling method: {method_name}") response = method(*args, **kwargs) - self._add_response_as_metadata(function_name=method_name, data=response, subsegment=subsegment) + self._add_response_as_metadata( + function_name=method_name, + data=response, + subsegment=subsegment, + capture_response=capture_response, + ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) @@ -516,7 +557,11 @@ def decorate(*args, **kwargs): return decorate def _add_response_as_metadata( - self, function_name: str = None, data: Any = None, subsegment: aws_xray_sdk.core.models.subsegment = None + self, + function_name: str = None, + data: Any = None, + subsegment: aws_xray_sdk.core.models.subsegment = None, + capture_response: bool = True, ): """Add response as metadata for given subsegment @@ -528,8 +573,10 @@ def _add_response_as_metadata( data to add as subsegment metadata, by default None subsegment : aws_xray_sdk.core.models.subsegment, optional existing subsegment to add metadata on, by default None + capture_response : bool, optional + Do not include response as metadata, by default True """ - if data is None or subsegment is None: + if data is None or not capture_response or subsegment is None: return subsegment.put_metadata(key=f"{function_name} response", value=data, namespace=self._config["service"]) diff --git 
a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index 16c476ee0fc..f948b046313 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -502,3 +502,37 @@ def generator_fn(): assert put_metadata_mock_args["namespace"] == "booking" assert isinstance(put_metadata_mock_args["value"], ValueError) assert str(put_metadata_mock_args["value"]) == "test" + + +def test_tracer_lambda_handler_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock): + # GIVEN tracer is initialized + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, auto_patch=False) + + # WHEN capture_lambda_handler decorator is used + # and the handler response is empty + @tracer.capture_lambda_handler(capture_response=False) + def handler(event, context): + return "response" + + handler({}, mocker.MagicMock()) + + # THEN we should not add any metadata + assert in_subsegment_mock.put_metadata.call_count == 0 + + +def test_tracer_method_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock): + # GIVEN tracer is initialized + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, auto_patch=False) + + # WHEN capture_method decorator is used + # and the method response is empty + @tracer.capture_method(capture_response=False) + def greeting(name, message): + return "response" + + greeting(name="Foo", message="Bar") + + # THEN we should not add any metadata + assert in_subsegment_mock.put_metadata.call_count == 0 From 3ff8a013fd174b7d2c71489a0caffd325abd583a Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:08:45 +0200 Subject: [PATCH 08/42] fix: correct in_subsegment assertion --- tests/unit/test_tracing.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index f948b046313..3e2492b9e15 100644 --- a/tests/unit/test_tracing.py 
+++ b/tests/unit/test_tracing.py @@ -179,10 +179,9 @@ def test_tracer_no_autopatch(patch_mock): assert patch_mock.call_count == 0 -def test_tracer_lambda_handler_does_not_add_empty_response_as_metadata(mocker, provider_stub): +def test_tracer_lambda_handler_does_not_add_empty_response_as_metadata(mocker, provider_stub, in_subsegment_mock): # GIVEN tracer is initialized - put_metadata_mock = mocker.MagicMock() - provider = provider_stub(put_metadata_mock=put_metadata_mock) + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) tracer = Tracer(provider=provider) # WHEN capture_lambda_handler decorator is used @@ -194,13 +193,12 @@ def handler(event, context): handler({}, mocker.MagicMock()) # THEN we should not add empty metadata - assert put_metadata_mock.call_count == 0 + assert in_subsegment_mock.put_metadata.call_count == 0 -def test_tracer_method_does_not_add_empty_response_as_metadata(mocker, provider_stub): +def test_tracer_method_does_not_add_empty_response_as_metadata(mocker, provider_stub, in_subsegment_mock): # GIVEN tracer is initialized - put_metadata_mock = mocker.MagicMock() - provider = provider_stub(put_metadata_mock=put_metadata_mock) + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) tracer = Tracer(provider=provider) # WHEN capture_method decorator is used @@ -212,7 +210,7 @@ def greeting(name, message): greeting(name="Foo", message="Bar") # THEN we should not add empty metadata - assert put_metadata_mock.call_count == 0 + assert in_subsegment_mock.put_metadata.call_count == 0 @mock.patch("aws_lambda_powertools.tracing.tracer.aws_xray_sdk.core.patch") From 582f023db81880c9dc04ee8665f2b8f782469ffc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:19:20 +0200 Subject: [PATCH 09/42] improv: naming consistency --- aws_lambda_powertools/tracing/tracer.py | 82 +++++++++++++------------ 1 file changed, 43 insertions(+), 39 deletions(-) diff --git a/aws_lambda_powertools/tracing/tracer.py 
b/aws_lambda_powertools/tracing/tracer.py index bac70ddaeb5..34dbbf5faf9 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -158,19 +158,19 @@ def __init__( def put_annotation(self, key: str, value: Any): """Adds annotation to existing segment or subsegment + Parameters + ---------- + key : str + Annotation key + value : any + Value for annotation + Example ------- Custom annotation for a pseudo service named payment tracer = Tracer(service="payment") tracer.put_annotation("PaymentStatus", "CONFIRMED") - - Parameters - ---------- - key : str - Annotation key (e.g. PaymentStatus) - value : any - Value for annotation (e.g. "CONFIRMED") """ if self.disabled: logger.debug("Tracing has been disabled, aborting put_annotation") @@ -283,7 +283,7 @@ def decorate(event, context): logger.debug("Received lambda handler response successfully") logger.debug(response) self._add_response_as_metadata( - function_name=lambda_handler_name, + method_name=lambda_handler_name, data=response, subsegment=subsegment, capture_response=capture_response, @@ -291,7 +291,7 @@ def decorate(event, context): except Exception as err: logger.exception(f"Exception received from {lambda_handler_name}") self._add_full_exception_as_metadata( - function_name=lambda_handler_name, error=err, subsegment=subsegment + method_name=lambda_handler_name, error=err, subsegment=subsegment ) raise @@ -452,22 +452,28 @@ async def async_tasks(): logger.debug("Decorator called with parameters") return functools.partial(self.capture_method, capture_response=capture_response) + method_name = f"{method.__name__}" + if inspect.iscoroutinefunction(method): - decorate = self._decorate_async_function(method=method, capture_response=capture_response) + decorate = self._decorate_async_function( + method=method, capture_response=capture_response, method_name=method_name + ) elif inspect.isgeneratorfunction(method): - decorate = self._decorate_generator_function(method=method, 
capture_response=capture_response) + decorate = self._decorate_generator_function( + method=method, capture_response=capture_response, method_name=method_name + ) elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): decorate = self._decorate_generator_function_with_context_manager( - method=method, capture_response=capture_response + method=method, capture_response=capture_response, method_name=method_name ) else: - decorate = self._decorate_sync_function(method=method, capture_response=capture_response) + decorate = self._decorate_sync_function( + method=method, capture_response=capture_response, method_name=method_name + ) return decorate - def _decorate_async_function(self, method: Callable = None, capture_response: bool = True): - method_name = f"{method.__name__}" - + def _decorate_async_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None): @functools.wraps(method) async def decorate(*args, **kwargs): async with self.provider.in_subsegment_async(name=f"## {method_name}") as subsegment: @@ -475,23 +481,23 @@ async def decorate(*args, **kwargs): logger.debug(f"Calling method: {method_name}") response = await method(*args, **kwargs) self._add_response_as_metadata( - function_name=method_name, + method_name=method_name, data=response, subsegment=subsegment, capture_response=capture_response, ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) raise return response return decorate - def _decorate_generator_function(self, method: Callable = None, capture_response: bool = True): - method_name = f"{method.__name__}" - + def _decorate_generator_function( + self, method: Callable = None, capture_response: bool = True, method_name: str = None + ): 
@functools.wraps(method) def decorate(*args, **kwargs): with self.provider.in_subsegment(name=f"## {method_name}") as subsegment: @@ -499,20 +505,20 @@ def decorate(*args, **kwargs): logger.debug(f"Calling method: {method_name}") result = yield from method(*args, **kwargs) self._add_response_as_metadata( - function_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response + method_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) raise return result return decorate - def _decorate_generator_function_with_context_manager(self, method: Callable = None, capture_response: bool = True): - method_name = f"{method.__name__}" - + def _decorate_generator_function_with_context_manager( + self, method: Callable = None, capture_response: bool = True, method_name: str = None + ): @functools.wraps(method) @contextlib.contextmanager def decorate(*args, **kwargs): @@ -523,18 +529,16 @@ def decorate(*args, **kwargs): result = return_val yield result self._add_response_as_metadata( - function_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response + method_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) raise return decorate - def _decorate_sync_function(self, method: Callable = None, capture_response: bool = True): - method_name = f"{method.__name__}" - + def 
_decorate_sync_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None): @functools.wraps(method) def decorate(*args, **kwargs): with self.provider.in_subsegment(name=f"## {method_name}") as subsegment: @@ -542,14 +546,14 @@ def decorate(*args, **kwargs): logger.debug(f"Calling method: {method_name}") response = method(*args, **kwargs) self._add_response_as_metadata( - function_name=method_name, + method_name=method_name, data=response, subsegment=subsegment, capture_response=capture_response, ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) raise return response @@ -558,7 +562,7 @@ def decorate(*args, **kwargs): def _add_response_as_metadata( self, - function_name: str = None, + method_name: str = None, data: Any = None, subsegment: aws_xray_sdk.core.models.subsegment = None, capture_response: bool = True, @@ -567,7 +571,7 @@ def _add_response_as_metadata( Parameters ---------- - function_name : str, optional + method_name : str, optional function name to add as metadata key, by default None data : Any, optional data to add as subsegment metadata, by default None @@ -579,23 +583,23 @@ def _add_response_as_metadata( if data is None or not capture_response or subsegment is None: return - subsegment.put_metadata(key=f"{function_name} response", value=data, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self._config["service"]) def _add_full_exception_as_metadata( - self, function_name: str = None, error: Exception = None, subsegment: aws_xray_sdk.core.models.subsegment = None + self, method_name: str = None, error: Exception = None, subsegment: aws_xray_sdk.core.models.subsegment = None ): """Add full exception object as 
metadata for given subsegment Parameters ---------- - function_name : str, optional + method_name : str, optional function name to add as metadata key, by default None error : Exception, optional error to add as subsegment metadata, by default None subsegment : aws_xray_sdk.core.models.subsegment, optional existing subsegment to add metadata on, by default None """ - subsegment.put_metadata(key=f"{function_name} error", value=error, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self._config["service"]) def __disable_tracing_provider(self): """Forcefully disables tracing""" From 0a616871a9fd72e09b55203bd0288fdd1c0bbf35 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:29:00 +0200 Subject: [PATCH 10/42] fix: naming and staticmethod consistency --- aws_lambda_powertools/tracing/tracer.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 34dbbf5faf9..b21eb80266f 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -150,7 +150,7 @@ def __init__( self.auto_patch = self._config["auto_patch"] if self.disabled: - self.__disable_tracing_provider() + self._disable_tracer_provider() if self.auto_patch: self.patch(modules=patch_modules) @@ -601,12 +601,14 @@ def _add_full_exception_as_metadata( """ subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self._config["service"]) - def __disable_tracing_provider(self): + @staticmethod + def _disable_tracer_provider(): """Forcefully disables tracing""" logger.debug("Disabling tracer provider...") aws_xray_sdk.global_sdk_config.set_sdk_enabled(False) - def __is_trace_disabled(self) -> bool: + @staticmethod + def _is_tracer_disabled() -> bool: """Detects whether trace has been disabled Tracing is automatically disabled in the following conditions: @@ -643,7 +645,7 @@ 
def __build_config( provider: aws_xray_sdk.core.xray_recorder = None, ): """ Populates Tracer config for new and existing initializations """ - is_disabled = disabled if disabled is not None else self.__is_trace_disabled() + is_disabled = disabled if disabled is not None else self._is_tracer_disabled() is_service = service if service is not None else os.getenv("POWERTOOLS_SERVICE_NAME") self._config["provider"] = provider if provider is not None else self._config["provider"] From cb51a4c534bf28e3e01f259eff7458d723f359c6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:39:03 +0200 Subject: [PATCH 11/42] chore: fix debug log adding unused obj --- aws_lambda_powertools/metrics/metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 205f30d9545..5563d653451 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -150,7 +150,7 @@ def decorate(event, context): else: metrics = self.serialize_metric_set() self.clear_metrics() - logger.debug("Publishing metrics", {"metrics": metrics}) + logger.debug("Publishing metrics") print(json.dumps(metrics)) return response From dde963d63352a581be5bcb4481df087067a56f75 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:47:27 +0200 Subject: [PATCH 12/42] chore: remove/correct unnecessary debug logs --- aws_lambda_powertools/metrics/base.py | 4 ++-- aws_lambda_powertools/metrics/metric.py | 2 -- aws_lambda_powertools/metrics/metrics.py | 1 - 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index a1ffe08caf9..175097d9c10 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -177,7 +177,7 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me if self.service and not 
self.dimension_set.get("service"): self.dimension_set["service"] = self.service - logger.debug("Serializing...", {"metrics": metrics, "dimensions": dimensions}) + logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions}) metric_names_and_units: List[Dict[str, str]] = [] # [ { "Name": "metric_name", "Unit": "Count" } ] metric_names_and_values: Dict[str, str] = {} # { "metric_name": 1.0 } @@ -207,7 +207,7 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me } try: - logger.debug("Validating serialized metrics against CloudWatch EMF schema", embedded_metrics_object) + logger.debug("Validating serialized metrics against CloudWatch EMF schema") fastjsonschema.validate(definition=CLOUDWATCH_EMF_SCHEMA, data=embedded_metrics_object) except fastjsonschema.JsonSchemaException as e: message = f"Invalid format. Error: {e.message}, Invalid item: {e.name}" # noqa: B306, E501 diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index 1293139afbe..4451eb2d1d0 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -110,8 +110,6 @@ def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = No metric: SingleMetric = SingleMetric(namespace=namespace) metric.add_metric(name=name, unit=unit, value=value) yield metric - logger.debug("Serializing single metric") metric_set: Dict = metric.serialize_metric_set() finally: - logger.debug("Publishing single metric", {"metric": metric}) print(json.dumps(metric_set)) diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 5563d653451..2ab6cb35b4a 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -150,7 +150,6 @@ def decorate(event, context): else: metrics = self.serialize_metric_set() self.clear_metrics() - logger.debug("Publishing metrics") print(json.dumps(metrics)) 
return response From 800bca363b09c9b7e257ea1b4491eef80f2aea99 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 20:53:38 +0200 Subject: [PATCH 13/42] chore: clarify changelog bugfix vs breaking change --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index caafe2f8474..adec16c289a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### Fixed -- **Metrics**: Cold start metric is now completely separate from application metrics dimensions, making it easier and cheaper to visualize +- **Metrics**: Cold start metric is now completely separate from application metrics dimensions, making it easier and cheaper to visualize. + - This is a breaking change if you were graphing/alerting on both application metrics with the same name to compensate this previous malfunctioning + - Marked as bugfix as this is the intended behaviour since the beginning, as you shouldn't have the same application metric with different dimensions ## [1.3.1] - 2020-08-22 ### Fixed From adfee499af4f4f1eacf22c6ad352622a5bceef8d Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 21:02:24 +0200 Subject: [PATCH 14/42] chore: update changelog to reflect new feature --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index adec16c289a..e27158b968a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - This is a breaking change if you were graphing/alerting on both application metrics with the same name to compensate this previous malfunctioning - Marked as bugfix as this is the intended behaviour since the beginning, as you shouldn't have the same application metric with different dimensions +### Added +- **Tracer**: capture_lambda_handler and capture_method decorators now 
support `capture_response` parameter to not include function's response as part of tracing metadata + ## [1.3.1] - 2020-08-22 ### Fixed - **Tracer**: capture_method decorator did not properly handle nested context managers From a2f5076ab728b73ff52d0c894eb32d39570efd07 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 21:18:21 +0200 Subject: [PATCH 15/42] docs: bring new feature upfront when returning sensitive info --- docs/content/core/tracer.mdx | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index 677ad4ccae0..004e1ad6acf 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -17,6 +17,12 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. * Support tracing async methods, generators, and context managers * Auto patch supported modules, or a tuple of explicit modules supported by AWS X-Ray + + Returning sensitive information from your Lambda handler or functions, where Tracer is used? +

+ You can disable capturing their responses as tracing metadata with capture_response=False parameter for both capture_lambda_handler and capture_method decorators. +

+ ## Initialization Your AWS Lambda function must have permission to send traces to AWS X-Ray - Here is an example using AWS Serverless Application Model (SAM) @@ -63,6 +69,10 @@ def handler(event, context): charge_id = event.get('charge_id') payment = collect_payment(charge_id) ... + +@tracer.capture_lambda_handler(capture_response=False) # highlight-line +def handler(event, context): + return "sensitive_information" ``` ### Annotations @@ -108,7 +118,10 @@ def collect_payment(charge_id): ret = requests.post(PAYMENT_ENDPOINT) # logic tracer.put_annotation("PAYMENT_STATUS", "SUCCESS") # custom annotation return ret -... + +@tracer.capture_method(capture_response=False) # highlight-line +def sensitive_information_to_be_processed(): + return "sensitive_information" ``` ## Asynchronous and generator functions From f182b655b5f477d3e620fc95ff645ab22723b92c Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 22 Aug 2020 21:20:20 +0200 Subject: [PATCH 16/42] docs: grammar --- docs/content/core/tracer.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index 004e1ad6acf..36a0cded9f0 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -20,7 +20,7 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. Returning sensitive information from your Lambda handler or functions, where Tracer is used?

- You can disable capturing their responses as tracing metadata with capture_response=False parameter for both capture_lambda_handler and capture_method decorators. + You can disable Tracer from capturing their responses as tracing metadata with capture_response=False parameter in both capture_lambda_handler and capture_method decorators.

## Initialization From 1f43ce4c26756dfe59d3bfa06a57b8aa97457501 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 11:47:04 +0200 Subject: [PATCH 17/42] line endings --- .github/workflows/publish_layer.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/publish_layer.yml diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml new file mode 100644 index 00000000000..0f9c7d0e183 --- /dev/null +++ b/.github/workflows/publish_layer.yml @@ -0,0 +1,26 @@ +name: Publish lambda layer to SAR + +on: + push: + branches: + - feat/lambda-layer +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: eu-central-1 + AWS_DEFAULT_OUTPUT: json + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Set release notes tag + run: | + export RELEASE_TAG_VERSION=${{ github.event.release.tag_name }} + echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} + - name: write version to ssm + run: | + aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION + - name: trigger lamber layer publish pipeline + run: | + aws codepipeline start-pipeline-execution --name ${{ secret.CODEPIPELINE_NAME }} From eea437548771e637cdfe1bd1f4462e7b0c0b6c99 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 11:55:16 +0200 Subject: [PATCH 18/42] fix indent, yaml ... 
--- .github/workflows/publish_layer.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 0f9c7d0e183..94fc3da984e 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -11,16 +11,16 @@ env: AWS_DEFAULT_OUTPUT: json jobs: - build: + publish: runs-on: ubuntu-latest steps: - - name: Set release notes tag - run: | - export RELEASE_TAG_VERSION=${{ github.event.release.tag_name }} - echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} - - name: write version to ssm - run: | - aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION - - name: trigger lamber layer publish pipeline - run: | - aws codepipeline start-pipeline-execution --name ${{ secret.CODEPIPELINE_NAME }} + - name: Set release notes tag + run: | + export RELEASE_TAG_VERSION=${{ github.event.release.tag_name }} + echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} + - name: write version to ssm + run: | + aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION + - name: trigger lamber layer publish pipeline + run: | + aws codepipeline start-pipeline-execution --name ${{ secret.CODEPIPELINE_NAME }} From a68c967c0107923176b5f93a028896e987a4d3db Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 11:58:23 +0200 Subject: [PATCH 19/42] fix: remove actual response from debug logs --- aws_lambda_powertools/tracing/tracer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index b21eb80266f..78917fe72ef 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -281,7 +281,6 @@ def decorate(event, context): logger.debug("Calling lambda handler") response = lambda_handler(event, context) logger.debug("Received lambda 
handler response successfully") - logger.debug(response) self._add_response_as_metadata( method_name=lambda_handler_name, data=response, From 194959392570ccbec873518afa10184b520b8fc9 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 12:00:46 +0200 Subject: [PATCH 20/42] fix typo in branch trigger --- .github/workflows/publish_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 94fc3da984e..f3c501d72b4 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -3,7 +3,7 @@ name: Publish lambda layer to SAR on: push: branches: - - feat/lambda-layer + - feat/lambda-layers env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} From 91c05e675e2476157c818f3f177619059633f5ff Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 12:01:04 +0200 Subject: [PATCH 21/42] chore: update internal docstrings for consistency --- aws_lambda_powertools/tracing/tracer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 78917fe72ef..25caacb651e 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -571,7 +571,7 @@ def _add_response_as_metadata( Parameters ---------- method_name : str, optional - function name to add as metadata key, by default None + method name to add as metadata key, by default None data : Any, optional data to add as subsegment metadata, by default None subsegment : aws_xray_sdk.core.models.subsegment, optional @@ -592,7 +592,7 @@ def _add_full_exception_as_metadata( Parameters ---------- method_name : str, optional - function name to add as metadata key, by default None + method name to add as metadata key, by default None error : Exception, optional error to add as subsegment metadata, 
by default None subsegment : aws_xray_sdk.core.models.subsegment, optional From 6ddd736a85cbf030a128fdcc9a48a16ca7617daf Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 12:01:19 +0200 Subject: [PATCH 22/42] more typos --- .github/workflows/publish_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index f3c501d72b4..b98f966463c 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -23,4 +23,4 @@ jobs: aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION - name: trigger lamber layer publish pipeline run: | - aws codepipeline start-pipeline-execution --name ${{ secret.CODEPIPELINE_NAME }} + aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }} From e6e5c1277013b46525b7398d7934d198384e6860 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 12:03:48 +0200 Subject: [PATCH 23/42] add relase tag simulation --- .github/workflows/publish_layer.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index b98f966463c..463a46f3e78 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -20,6 +20,7 @@ jobs: echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} - name: write version to ssm run: | + RELEASE_TAG_VERSION=1.3.0 # simulate release here, will be removed later aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION - name: trigger lamber layer publish pipeline run: | From dac548cde622e7474880459bc0c958cef0a75406 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 14:55:01 +0200 Subject: [PATCH 24/42] add overwrite flag for ssm --- .github/workflows/publish_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 463a46f3e78..91633d62ee9 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -21,7 +21,7 @@ jobs: - name: write version to ssm run: | RELEASE_TAG_VERSION=1.3.0 # simulate release here, will be removed later - aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION + aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite - name: trigger lamber layer publish pipeline run: | aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }} From 7e0b8727f42c7a114eb2bbba00617033b9f80f7a Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 15:00:48 +0200 Subject: [PATCH 25/42] remove tmp release flag and set trigger to release published --- .github/workflows/publish_layer.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 91633d62ee9..27a5de64287 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -1,9 +1,8 @@ name: Publish lambda layer to SAR on: - push: - branches: - - feat/lambda-layers + release: + types: [published] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -20,7 +19,6 @@ jobs: echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} - name: write version to ssm run: | - RELEASE_TAG_VERSION=1.3.0 # simulate release here, will be removed later aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite - name: trigger lamber layer publish pipeline run: | From e8d77b7b001c9623f18dac80695e27215915077f Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Sun, 23 Aug 2020 15:07:53 +0200 Subject: [PATCH 26/42] change to eu-west-1 default region --- 
.github/workflows/publish_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 27a5de64287..84f17151a31 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -6,7 +6,7 @@ on: env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: eu-central-1 + AWS_DEFAULT_REGION: eu-west-1 AWS_DEFAULT_OUTPUT: json jobs: From 64f7aa8ad66503cc86b1f377355b4ebdb24cfd7e Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sun, 23 Aug 2020 17:00:55 +0200 Subject: [PATCH 27/42] docs: move concurrent asynchronous under escape hatch Signed-off-by: heitorlessa --- docs/content/core/tracer.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index 36a0cded9f0..4ebdc478d97 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -199,7 +199,7 @@ You can use `tracer.provider` attribute to access all methods provided by AWS X- This is useful when you need a feature available in X-Ray that is not available in the Tracer utility, for example [thread-safe](https://github.com/aws/aws-xray-sdk-python/#user-content-trace-threadpoolexecutor), or [context managers](https://github.com/aws/aws-xray-sdk-python/#user-content-start-a-custom-segmentsubsegment). -## Concurrent asynchronous functions +### Concurrent asynchronous functions As of now, X-Ray SDK will raise an exception when async functions are run and traced concurrently. 
From 3faf915b54d6e646e3939b88995e3a2254be5824 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:01:18 +0200 Subject: [PATCH 28/42] docs: create Patching modules section; cleanup response wording Signed-off-by: heitorlessa --- docs/content/core/tracer.mdx | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index 4ebdc478d97..a01cf8fb083 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -58,7 +58,7 @@ You can trace your Lambda function handler via `capture_lambda_handler`. When using this decorator, Tracer performs these additional tasks to ease operations: * Creates a `ColdStart` annotation to easily filter traces that have had an initialization overhead -* Adds any response, or full exceptions generated by the handler as metadata +* Captures any response, or full exceptions generated by the handler, and include as tracing metadata ```python:title=lambda_handler.py from aws_lambda_powertools import Tracer @@ -70,6 +70,8 @@ def handler(event, context): payment = collect_payment(charge_id) ... +# Disables Tracer from capturing response and adding as metadata +# Useful when dealing with sensitive data @tracer.capture_lambda_handler(capture_response=False) # highlight-line def handler(event, context): return "sensitive_information" @@ -170,7 +172,23 @@ def handler(evt, ctx): # highlight-line another_result = list(collect_payment_gen()) ``` -## Tracing aiohttp requests +## Patching modules + +Tracer automatically patches all [supported libraries by X-Ray](https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-patching.html) during initialization, by default. Underneath, AWS X-Ray SDK checks whether a supported library has been imported before patching. 
+ +If you're looking to shave a few microseconds, or milliseconds depending on your function memory configuration, you can patch specific modules using `patch_modules` param: + +```python:title=app.py +import boto3 +import requests + +from aws_lambda_powertools import Tracer + +modules_to_be_patched = ["boto3", "requests"] +tracer = Tracer(patch_modules=modules_to_be_patched) # highlight-line +``` + +### Tracing aiohttp requests This snippet assumes you have aiohttp as a dependency From 10cb3a98bcf01abb0e120d871508e698b7fefc23 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:22:11 +0200 Subject: [PATCH 29/42] docs: make sensitive info more explicit with an example Signed-off-by: heitorlessa --- docs/content/core/tracer.mdx | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index a01cf8fb083..4042d51a861 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -17,12 +17,6 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. * Support tracing async methods, generators, and context managers * Auto patch supported modules, or a tuple of explicit modules supported by AWS X-Ray - - Returning sensitive information from your Lambda handler or functions, where Tracer is used? -

- You can disable Tracer from capturing their responses as tracing metadata with capture_response=False parameter in both capture_lambda_handler and capture_method decorators. -

- ## Initialization Your AWS Lambda function must have permission to send traces to AWS X-Ray - Here is an example using AWS Serverless Application Model (SAM) @@ -69,7 +63,15 @@ def handler(event, context): charge_id = event.get('charge_id') payment = collect_payment(charge_id) ... +``` + + + Returning sensitive information from your Lambda handler or functions, where Tracer is used? +

+ You can disable Tracer from capturing their responses as tracing metadata with capture_response=False parameter in both capture_lambda_handler and capture_method decorators. +

+```python:title=do_not_capture_response_as_metadata.py # Disables Tracer from capturing response and adding as metadata # Useful when dealing with sensitive data @tracer.capture_lambda_handler(capture_response=False) # highlight-line From 44db087cbd1605d3ca55a01a1f7e979d6a20d720 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:23:29 +0200 Subject: [PATCH 30/42] docs: fix typos, log_event & sampling wording Signed-off-by: heitorlessa --- docs/content/core/logger.mdx | 37 ++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx index 225fb401cf1..618d7a78435 100644 --- a/docs/content/core/logger.mdx +++ b/docs/content/core/logger.mdx @@ -58,7 +58,7 @@ Key | Type | Example | Description **sampling_rate** | int | 0.1 | Debug logging sampling rate in percentage e.g. 1% in this case **message** | any | "Collecting payment" | Log statement value. Unserializable JSON values will be casted to string -## Capturing context Lambda info +## Capturing Lambda context info You can enrich your structured logs with key Lambda context information via `inject_lambda_context`. @@ -79,22 +79,6 @@ def handler(event, context): ... ``` -You can also explicitly log any incoming event using `log_event` param or via `POWERTOOLS_LOGGER_LOG_EVENT` env var. - - - This is disabled by default to prevent sensitive info being logged. -
- -```python:title=log_handler_event.py -from aws_lambda_powertools import Logger - -logger = Logger() - -@logger.inject_lambda_context(log_event=True) # highlight-start -def handler(event, context): - ... -``` - When used, this will include the following keys: Key | Type | Example @@ -145,6 +129,23 @@ Key | Type | Example } ``` +
+ +You can also explicitly log any incoming event using `log_event` param or via `POWERTOOLS_LOGGER_LOG_EVENT` env var. + + + This is disabled by default to prevent sensitive info being logged. +
+ +```python:title=log_handler_event.py +from aws_lambda_powertools import Logger + +logger = Logger() + +@logger.inject_lambda_context(log_event=True) # highlight-line +def handler(event, context): + ... +``` ## Appending additional keys @@ -222,7 +223,7 @@ If you ever forget to use `child` param, we will return an existing `Logger` wit You can dynamically set a percentage of your logs to **DEBUG** level using `sample_rate` param or via env var `POWERTOOLS_LOGGER_SAMPLE_RATE`. -This happens on an entire request basis, and DEBUG level is set at the constructor. That means, concurrent requests or infrequent invocations are more likely to occur as [new Lambda execution contexts are created](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), not reused. +Sampling calculation happens at the Logger class initialization. This means, when configured it, sampling it's more likely to happen during concurrent requests, or infrequent invocations as [new Lambda execution contexts are created](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), not reused. If you want this logic to happen on every invocation regardless whether Lambda reuses the execution environment or not, then create your Logger inside your Lambda handler. From 705371e99474be60b3f2f93479bb511ad98e5362 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:46:34 +0200 Subject: [PATCH 31/42] docs: subtle rewording for better clarity Signed-off-by: heitorlessa --- docs/content/core/metrics.mdx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/content/core/metrics.mdx b/docs/content/core/metrics.mdx index 696074c6812..9b750c28622 100644 --- a/docs/content/core/metrics.mdx +++ b/docs/content/core/metrics.mdx @@ -5,7 +5,9 @@ description: Core utility import Note from "../../src/components/Note" -Metrics creates custom metrics asynchronously via logging metrics to standard output following Amazon CloudWatch Embedded Metric Format (EMF). 
+Metrics creates custom metrics asynchronously by logging metrics to standard output following Amazon CloudWatch Embedded Metric Format (EMF). + +These metrics can be visualized through [Amazon CloudWatch Console](https://console.aws.amazon.com/cloudwatch/). **Key features** @@ -32,7 +34,9 @@ Resources: ``` We recommend you use your application or main service as a metric namespace. -You can explicitly set a namespace name via `namespace` param or via `POWERTOOLS_METRICS_NAMESPACE` env var. This sets **namespace** key that will be used for all metrics. +You can explicitly set a namespace name via `namespace` param or via `POWERTOOLS_METRICS_NAMESPACE` env var. + +This sets **namespace** key that will be used for all metrics. You can also pass a service name via `service` param or `POWERTOOLS_SERVICE_NAME` env var. This will create a dimension with the service name. ```python:title=app.py @@ -67,7 +71,7 @@ metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) `MetricUnit` enum facilitate finding a supported metric unit by CloudWatch. Alternatively, you can pass the value as a string if you already know them e.g. "Count". -CloudWatch EMF supports a max of 100 metrics. Metrics will automatically flush all metrics when adding the 100th metric, where subsequent metrics will be aggregated into a new EMF object. +CloudWatch EMF supports a max of 100 metrics. Metrics utility will flush all metrics when adding the 100th metric while subsequent metrics will be aggregated into a new EMF object, for your convenience. 
## Creating a metric with a different dimension From ac0aba9cd9786cb53578ba2c07fa8a14ae6c942b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:47:00 +0200 Subject: [PATCH 32/42] docs: add blog post, and quick example Signed-off-by: heitorlessa --- docs/content/index.mdx | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index e5c2688ecc7..be10589f270 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -3,8 +3,15 @@ title: Homepage description: AWS Lambda Powertools Python --- +import Note from "../src/components/Note" + Powertools is a suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier. + + Looking for a quick run through of the core utilities?

+ Check out this detailed blog post with a practical example. +

+ ## Install Powertools is available in PyPi. You can use your favourite dependency management tool to install it @@ -12,6 +19,11 @@ Powertools is available in PyPi. You can use your favourite dependency managemen * [poetry](https://python-poetry.org/): `poetry add aws-lambda-powertools` * [pip](https://pip.pypa.io/en/latest/index.html): `pip install aws-lambda-powertools` +**Quick hello world example using SAM CLI** +```bash:title=hello_world.sh +sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python +``` + ## Features * [Tracing](./core/tracer) - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions From e89fbea5689bbe2390d892984ad6ba77fe2b739f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:53:17 +0200 Subject: [PATCH 33/42] docs: use table for clarity Signed-off-by: heitorlessa --- docs/content/index.mdx | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index be10589f270..412cee68701 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -26,11 +26,13 @@ sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python ## Features -* [Tracing](./core/tracer) - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions -* [Logging](./core/logger) - Structured logging made easier, and decorator to enrich structured logging with key Lambda context details -* [Metrics](./core/metrics) - Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) -* [Bring your own middleware](./utilities/middleware_factory) - Decorator factory to create your own middleware to run logic before, and after each Lambda invocation -* [Parameters utility](./utilities/parameters) - Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a 
specific amount of time +Utility | Description +------------------------------------------------- | --------------------------------------------------------------------------------- +[Tracing](./core/tracer) | Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions +[Logging](./core/logger) | Structured logging made easier, and decorator to enrich structured logging with key Lambda context details +[Metrics](./core/metrics) | Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) +[Bring your own middleware](.//utilities/middleware_factory) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation +[Parameters utility](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time ## Tenets @@ -49,10 +51,10 @@ _`*` Core utilities are Tracer, Logger and Metrics. 
Optional utilities may vary Environment variable | Description | Utility ------------------------------------------------- | --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- -**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | all +**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics) **POWERTOOLS_TRACE_DISABLED** | Disables tracing | [Tracing](./core/tracer) -**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [middleware_factory](./utilities/middleware_factory) +**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) **LOG_LEVEL** | Sets logging level | [Logging](./core/logger) From efd38d823e707864a7c4e8e986cc414524f3a817 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 23 Aug 2020 19:56:10 +0200 Subject: [PATCH 34/42] docs: move tenets; remove extra space Signed-off-by: heitorlessa --- docs/content/index.mdx | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index 412cee68701..02b2846688d 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -10,7 +10,7 @@ Powertools is a suite of utilities for AWS Lambda Functions that makes tracing w Looking for a quick run through of the core utilities?

Check out this detailed blog post with a practical example. -

+
## Install @@ -34,17 +34,6 @@ Utility | Description [Bring your own middleware](.//utilities/middleware_factory) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation [Parameters utility](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time -## Tenets - -* **AWS Lambda only** – We optimise for AWS Lambda function environments and supported runtimes only. Utilities might work with web frameworks and non-Lambda environments, though they are not officially supported. -* **Eases the adoption of best practices** – The main priority of the utilities is to facilitate best practices adoption, as defined in the AWS Well-Architected Serverless Lens; all other functionality is optional. -* **Keep it lean** – Additional dependencies are carefully considered for security and ease of maintenance, and prevent negatively impacting startup time. -* **We strive for backwards compatibility** – New features and changes should keep backwards compatibility. If a breaking change cannot be avoided, the deprecation and migration process should be clearly defined. -* **We work backwards from the community** – We aim to strike a balance of what would work best for 80% of customers. Emerging practices are considered and discussed via Requests for Comment (RFCs) -* **Idiomatic** – Utilities follow programming language idioms and language-specific best practices. - -_`*` Core utilities are Tracer, Logger and Metrics. Optional utilities may vary across languages._ - ## Environment variables **Environment variables** used across suite of utilities. @@ -68,3 +57,14 @@ from aws_lambda_powertools.logging.logger import set_package_logger set_package_logger() ``` + +## Tenets + +* **AWS Lambda only** – We optimise for AWS Lambda function environments and supported runtimes only. 
Utilities might work with web frameworks and non-Lambda environments, though they are not officially supported. +* **Eases the adoption of best practices** – The main priority of the utilities is to facilitate best practices adoption, as defined in the AWS Well-Architected Serverless Lens; all other functionality is optional. +* **Keep it lean** – Additional dependencies are carefully considered for security and ease of maintenance, and prevent negatively impacting startup time. +* **We strive for backwards compatibility** – New features and changes should keep backwards compatibility. If a breaking change cannot be avoided, the deprecation and migration process should be clearly defined. +* **We work backwards from the community** – We aim to strike a balance of what would work best for 80% of customers. Emerging practices are considered and discussed via Requests for Comment (RFCs) +* **Idiomatic** – Utilities follow programming language idioms and language-specific best practices. + +_`*` Core utilities are Tracer, Logger and Metrics. 
Optional utilities may vary across languages._ From 96b7b8f627d045b2dd48bfe413ed1f6c6eabd7e6 Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Mon, 24 Aug 2020 00:16:28 -0700 Subject: [PATCH 35/42] fix(ssm): Make decrypt an explicit option and refactoring (#123) * fix(ssm): Make decrypt an explicit option * chore: declare as self * fix: update get_parameter and get_parameters Changes: ssm.py - get_parameters - pass through the **sdk_options and merge in the recursive and decrypt params ssm.py - get_parameter - add explicit option for decrypt * chore: fix typos and type hinting * tests: verify that the default kwargs are set - `decrypt` should be false by default - `recursive` should be true by default * fix(capture_method): should yield inside with (#124) Changes: * capture_method should yield from within the "with" statement * Add missing test cases Closes #112 * chore: version bump to 1.3.1 * refactor: reduce get_multiple complexity Changes: - base.py - update get_multiple to reduce the overall complexity - base.py - `_has_not_expired` returns whether a key exists and has not expired - base.py - `transform_value` add `raise_on_transform_error` and default to True - test_utilities_parameters.py - Add a direct test of transform_value * refactor: revert to a regular for each Changes: * Add type hint to `values` as it can change later on in transform * Use a slightly faster and easier to read for each over dict comprehension Co-authored-by: Tom McCarthy --- .../utilities/parameters/base.py | 71 ++++++++++--------- .../utilities/parameters/secrets.py | 2 +- .../utilities/parameters/ssm.py | 62 ++++++++++++++-- tests/functional/test_utilities_parameters.py | 13 ++++ 4 files changed, 107 insertions(+), 41 deletions(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index 8a552b53bcb..274cd96aace 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ 
b/aws_lambda_powertools/utilities/parameters/base.py @@ -7,7 +7,7 @@ from abc import ABC, abstractmethod from collections import namedtuple from datetime import datetime, timedelta -from typing import Dict, Optional, Union +from typing import Dict, Optional, Tuple, Union from .exceptions import GetParameterError, TransformParameterError @@ -31,6 +31,9 @@ def __init__(self): self.store = {} + def _has_not_expired(self, key: Tuple[str, Optional[str]]) -> bool: + return key in self.store and self.store[key].ttl >= datetime.now() + def get( self, name: str, max_age: int = DEFAULT_MAX_AGE_SECS, transform: Optional[str] = None, **sdk_options ) -> Union[str, list, dict, bytes]: @@ -70,24 +73,26 @@ def get( # an acceptable tradeoff. key = (name, transform) - if key not in self.store or self.store[key].ttl < datetime.now(): - try: - value = self._get(name, **sdk_options) - # Encapsulate all errors into a generic GetParameterError - except Exception as exc: - raise GetParameterError(str(exc)) + if self._has_not_expired(key): + return self.store[key].value + + try: + value = self._get(name, **sdk_options) + # Encapsulate all errors into a generic GetParameterError + except Exception as exc: + raise GetParameterError(str(exc)) - if transform is not None: - value = transform_value(value, transform) + if transform is not None: + value = transform_value(value, transform) - self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age),) + self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age),) - return self.store[key].value + return value @abstractmethod def _get(self, name: str, **sdk_options) -> str: """ - Retrieve paramater value from the underlying parameter store + Retrieve parameter value from the underlying parameter store """ raise NotImplementedError() @@ -129,29 +134,22 @@ def get_multiple( key = (path, transform) - if key not in self.store or self.store[key].ttl < datetime.now(): - try: - values = 
self._get_multiple(path, **sdk_options) - # Encapsulate all errors into a generic GetParameterError - except Exception as exc: - raise GetParameterError(str(exc)) + if self._has_not_expired(key): + return self.store[key].value - if transform is not None: - new_values = {} - for key, value in values.items(): - try: - new_values[key] = transform_value(value, transform) - except Exception as exc: - if raise_on_transform_error: - raise exc - else: - new_values[key] = None + try: + values: Dict[str, Union[str, bytes, dict, None]] = self._get_multiple(path, **sdk_options) + # Encapsulate all errors into a generic GetParameterError + except Exception as exc: + raise GetParameterError(str(exc)) - values = new_values + if transform is not None: + for (key, value) in values.items(): + values[key] = transform_value(value, transform, raise_on_transform_error) - self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age),) + self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age),) - return self.store[key].value + return values @abstractmethod def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: @@ -161,16 +159,19 @@ def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: raise NotImplementedError() -def transform_value(value: str, transform: str) -> Union[dict, bytes]: +def transform_value(value: str, transform: str, raise_on_transform_error: bool = True) -> Union[dict, bytes, None]: """ Apply a transform to a value Parameters --------- value: str - Parameter alue to transform + Parameter value to transform transform: str Type of transform, supported values are "json" and "binary" + raise_on_transform_error: bool, optional + Raises an exception if any transform fails, otherwise this will + return a None value for each transform that failed Raises ------ @@ -187,4 +188,6 @@ def transform_value(value: str, transform: str) -> Union[dict, bytes]: raise ValueError(f"Invalid transform type 
'{transform}'") except Exception as exc: - raise TransformParameterError(str(exc)) + if raise_on_transform_error: + raise TransformParameterError(str(exc)) + return None diff --git a/aws_lambda_powertools/utilities/parameters/secrets.py b/aws_lambda_powertools/utilities/parameters/secrets.py index ee4585309fe..67cb94c340b 100644 --- a/aws_lambda_powertools/utilities/parameters/secrets.py +++ b/aws_lambda_powertools/utilities/parameters/secrets.py @@ -77,7 +77,7 @@ def _get(self, name: str, **sdk_options) -> str: ---------- name: str Name of the parameter - sdk_options: dict + sdk_options: dict, optional Dictionary of options that will be passed to the Secrets Manager get_secret_value API call """ diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py index b458f8690d0..0f39bfac9c0 100644 --- a/aws_lambda_powertools/utilities/parameters/ssm.py +++ b/aws_lambda_powertools/utilities/parameters/ssm.py @@ -8,7 +8,7 @@ import boto3 from botocore.config import Config -from .base import DEFAULT_PROVIDERS, BaseProvider +from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider class SSMProvider(BaseProvider): @@ -86,6 +86,46 @@ def __init__( super().__init__() + def get( + self, + name: str, + max_age: int = DEFAULT_MAX_AGE_SECS, + transform: Optional[str] = None, + decrypt: bool = False, + **sdk_options + ) -> Union[str, list, dict, bytes]: + """ + Retrieve a parameter value or return the cached value + + Parameters + ---------- + name: str + Parameter name + max_age: int + Maximum age of the cached value + transform: str + Optional transformation of the parameter value. Supported values + are "json" for JSON strings and "binary" for base 64 encoded + values. 
+ decrypt: bool, optional + If the parameter value should be decrypted + sdk_options: dict, optional + Arguments that will be passed directly to the underlying API call + + Raises + ------ + GetParameterError + When the parameter provider fails to retrieve a parameter value for + a given name. + TransformParameterError + When the parameter provider fails to transform a parameter value. + """ + + # Add `decrypt` to sdk_options so we can have an explicit option for this + sdk_options["decrypt"] = decrypt + + return super().get(name, max_age, transform, **sdk_options) + def _get(self, name: str, decrypt: bool = False, **sdk_options) -> str: """ Retrieve a parameter value from AWS Systems Manager Parameter Store @@ -144,7 +184,9 @@ def _get_multiple(self, path: str, decrypt: bool = False, recursive: bool = Fals return parameters -def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) -> Union[str, list, dict, bytes]: +def get_parameter( + name: str, transform: Optional[str] = None, decrypt: bool = False, **sdk_options +) -> Union[str, list, dict, bytes]: """ Retrieve a parameter value from AWS Systems Manager (SSM) Parameter Store @@ -154,6 +196,8 @@ def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) -> Name of the parameter transform: str, optional Transforms the content from a JSON object ('json') or base64 binary string ('binary') + decrypt: bool, optional + If the parameter values should be decrypted sdk_options: dict, optional Dictionary of options that will be passed to the Parameter Store get_parameter API call @@ -190,7 +234,10 @@ def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) -> if "ssm" not in DEFAULT_PROVIDERS: DEFAULT_PROVIDERS["ssm"] = SSMProvider() - return DEFAULT_PROVIDERS["ssm"].get(name, transform=transform) + # Add `decrypt` to sdk_options so we can have an explicit option for this + sdk_options["decrypt"] = decrypt + + return DEFAULT_PROVIDERS["ssm"].get(name,
transform=transform, **sdk_options) def get_parameters( @@ -205,10 +252,10 @@ def get_parameters( Path to retrieve the parameters transform: str, optional Transforms the content from a JSON object ('json') or base64 binary string ('binary') - decrypt: bool, optional - If the parameter values should be decrypted recursive: bool, optional If this should retrieve the parameter values recursively or not, defaults to True + decrypt: bool, optional + If the parameter values should be decrypted sdk_options: dict, optional Dictionary of options that will be passed to the Parameter Store get_parameters_by_path API call @@ -245,4 +292,7 @@ def get_parameters( if "ssm" not in DEFAULT_PROVIDERS: DEFAULT_PROVIDERS["ssm"] = SSMProvider() - return DEFAULT_PROVIDERS["ssm"].get_multiple(path, transform=transform, recursive=recursive, decrypt=decrypt) + sdk_options["recursive"] = recursive + sdk_options["decrypt"] = decrypt + + return DEFAULT_PROVIDERS["ssm"].get_multiple(path, transform=transform, **sdk_options) diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py index 7a0677b2197..abd121540a6 100644 --- a/tests/functional/test_utilities_parameters.py +++ b/tests/functional/test_utilities_parameters.py @@ -1310,6 +1310,7 @@ def test_get_parameter_new(monkeypatch, mock_name, mock_value): class TestProvider(BaseProvider): def _get(self, name: str, **kwargs) -> str: assert name == mock_name + assert not kwargs["decrypt"] return mock_value def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: @@ -1355,6 +1356,8 @@ def _get(self, name: str, **kwargs) -> str: def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: assert path == mock_name + assert kwargs["recursive"] + assert not kwargs["decrypt"] return mock_value monkeypatch.setattr(parameters.ssm, "DEFAULT_PROVIDERS", {}) @@ -1468,3 +1471,13 @@ def test_transform_value_wrong(mock_value): parameters.base.transform_value(mock_value, "INCORRECT") assert "Invalid 
transform type" in str(excinfo) + + +def test_transform_value_ignore_error(mock_value): + """ + Test transform_value() does not raise errors when raise_on_transform_error is False + """ + + value = parameters.base.transform_value(mock_value, "INCORRECT", raise_on_transform_error=False) + + assert value is None From 2455001e11371f3f139e24912e5e1fa2711ae4c6 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Mon, 24 Aug 2020 13:18:31 +0200 Subject: [PATCH 36/42] moved publish step to publish workflow after pypi push --- .github/workflows/publish.yml | 11 +++++++++++ .github/workflows/publish_layer.yml | 25 ------------------------- 2 files changed, 11 insertions(+), 25 deletions(-) delete mode 100644 .github/workflows/publish_layer.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4a2bed27deb..058c8490934 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -56,6 +56,17 @@ jobs: env: PYPI_USERNAME: __token__ PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + - name: publish lambda layer in SAR by triggering the internal codepipeline + run: | + aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite + aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: eu-west-1 + AWS_DEFAULT_OUTPUT: json + + sync_master: needs: upload diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml deleted file mode 100644 index 84f17151a31..00000000000 --- a/.github/workflows/publish_layer.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Publish lambda layer to SAR - -on: - release: - types: [published] -env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: eu-west-1 - AWS_DEFAULT_OUTPUT: json - -jobs: - 
publish: - runs-on: ubuntu-latest - steps: - - name: Set release notes tag - run: | - export RELEASE_TAG_VERSION=${{ github.event.release.tag_name }} - echo ::set-env name=RELEASE_TAG_VERSION::${RELEASE_TAG_VERSION:1} - - name: write version to ssm - run: | - aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite - - name: trigger lamber layer publish pipeline - run: | - aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }} From ac10b4f8c7cec8184e0fc0ca57caa0a75d20fe17 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Mon, 24 Aug 2020 17:26:44 +0200 Subject: [PATCH 37/42] add layer to docs and how to use it from SAR --- docs/content/index.mdx | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index e5c2688ecc7..ca1f8e6b5be 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -54,3 +54,18 @@ from aws_lambda_powertools.logging.logger import set_package_logger set_package_logger() ``` +## Lambda Layer + +Powertools is also available as a layer and is distributed via the [Serverless Application Repository](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html). +Create the layer via API with: + +``` +aws serverlessrepo create-cloud-formation-change-set --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer --stack-name YOUR_STACK_NAME --capabilities CAPABILITY_NAMED_IAM +CHANGE_SET_ID=$(aws cloudformation list-change-sets --stack-name YOUR_STACK_NAME --query 'Summaries[*].ChangeSetId' --output text) +aws cloudformation wait change-set-create-complete --change-set-name $CHANGE_SET_ID +aws cloudformation execute-change-set --change-set-name $CHANGE_SET_ID +``` + +this will create a CloudFormation stack with the powertools layer in a specific region where you run this commands. 
+Keep in mind that layers are regional resources and you need to create the layer in every region where you need it. +Alternatively, you can deploy the layer from the AWS Console by navigating to Serverless Application Repository and search for `aws-lambda-powertools-python-layer` and follow the deploy steps from there. From 679b22faecb31d490b94a70494e67dbec6bdc7d0 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Mon, 24 Aug 2020 17:36:37 +0200 Subject: [PATCH 38/42] formatting for bash script --- docs/content/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index ca1f8e6b5be..ecd29a461d8 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -59,7 +59,7 @@ set_package_logger() Powertools is also available as a layer and is distributed via the [Serverless Application Repository](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html). Create the layer via API with: -``` +```bash:title=bash-script aws serverlessrepo create-cloud-formation-change-set --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer --stack-name YOUR_STACK_NAME --capabilities CAPABILITY_NAMED_IAM CHANGE_SET_ID=$(aws cloudformation list-change-sets --stack-name YOUR_STACK_NAME --query 'Summaries[*].ChangeSetId' --output text) aws cloudformation wait change-set-create-complete --change-set-name $CHANGE_SET_ID From 607904c1e574e6176c5e4d69445e9c10f1b141a6 Mon Sep 17 00:00:00 2001 From: Alex Melnyk Date: Tue, 25 Aug 2020 13:46:31 +0200 Subject: [PATCH 39/42] fix heading error due to merge --- docs/content/index.mdx | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index f47b25125bb..aff7949bf93 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -73,7 +73,6 @@ aws cloudformation execute-change-set --change-set-name $CHANGE_SET_ID this will create a CloudFormation 
stack with the powertools layer in a specific region where you run this commands. Keep in mind that layers are regional resources and you need to create the layer in every region where you need it. Alternatively, you can deploy the layer from the AWS Console by navigating to Serverless Application Repository and search for `aws-lambda-powertools-python-layer` and follow the deploy steps from there. -======= ## Tenets @@ -85,4 +84,3 @@ Alternatively, you can deploy the layer from the AWS Console by navigating to Se * **Idiomatic** – Utilities follow programming language idioms and language-specific best practices. _`*` Core utilities are Tracer, Logger and Metrics. Optional utilities may vary across languages._ - From 041437b5993c832d0a7ec1f25d6f9ebc334a4bb2 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Aug 2020 17:06:48 +0200 Subject: [PATCH 40/42] fix: upgrade dot-prop, serialize-javascript Signed-off-by: heitorlessa --- docs/package-lock.json | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/docs/package-lock.json b/docs/package-lock.json index 4ca6ae53d77..107a07f81a8 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -6949,9 +6949,9 @@ "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=" }, "dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz", + "integrity": "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==", "requires": { "is-obj": "^1.0.0" } @@ -18247,9 +18247,12 @@ } }, "serialize-javascript": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-2.1.2.tgz", - "integrity": 
"sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", + "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==", + "requires": { + "randombytes": "^2.1.0" + } }, "serve-index": { "version": "1.9.1", @@ -19666,15 +19669,15 @@ } }, "terser-webpack-plugin": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz", - "integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz", + "integrity": "sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==", "requires": { "cacache": "^12.0.2", "find-cache-dir": "^2.1.0", "is-wsl": "^1.1.0", "schema-utils": "^1.0.0", - "serialize-javascript": "^2.1.2", + "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", "terser": "^4.1.2", "webpack-sources": "^1.4.0", @@ -20365,9 +20368,9 @@ "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=" }, "dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz", + "integrity": "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==", "requires": { "is-obj": "^1.0.0" } From f9a8c5b19058158a577d374633251125bdf5fc7d Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Aug 2020 17:48:54 +0200 Subject: [PATCH 41/42] docs: add Lambda Layer SAR App url and ARN Signed-off-by: heitorlessa --- docs/content/index.mdx | 36 
++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/docs/content/index.mdx b/docs/content/index.mdx index aff7949bf93..26ab367ba4c 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -20,10 +20,30 @@ Powertools is available in PyPi. You can use your favourite dependency managemen * [pip](https://pip.pypa.io/en/latest/index.html): `pip install aws-lambda-powertools` **Quick hello world example using SAM CLI** + ```bash:title=hello_world.sh sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python ``` +### Lambda Layer + +Powertools is also available as a Lambda Layer. It is distributed via the [AWS Serverless Application Repository (SAR)](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html). + +App | ARN +----------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- +[aws-lambda-powertools-python-layer](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer) | arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + +If using SAM, you can include this SAR App as part of your shared Layers stack, and lock to a specific semantic version. Once deployed, it'll be available across the account this is deployed to. 
+ +```yaml + AwsLambdaPowertoolsPythonLayer: + Type: AWS::Serverless::Application + Properties: + Location: + ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + SemanticVersion: 1.3.1 # change to latest semantic version available in SAR +``` + ## Features Utility | Description @@ -58,22 +78,6 @@ from aws_lambda_powertools.logging.logger import set_package_logger set_package_logger() ``` -## Lambda Layer - -Powertools is also available as a layer and is distributed via the [Serverless Application Repository](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html). -Create the layer via API with: - -```bash:title=bash-script -aws serverlessrepo create-cloud-formation-change-set --application-id arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer --stack-name YOUR_STACK_NAME --capabilities CAPABILITY_NAMED_IAM -CHANGE_SET_ID=$(aws cloudformation list-change-sets --stack-name YOUR_STACK_NAME --query 'Summaries[*].ChangeSetId' --output text) -aws cloudformation wait change-set-create-complete --change-set-name $CHANGE_SET_ID -aws cloudformation execute-change-set --change-set-name $CHANGE_SET_ID -``` - -this will create a CloudFormation stack with the powertools layer in a specific region where you run this commands. -Keep in mind that layers are regional resources and you need to create the layer in every region where you need it. -Alternatively, you can deploy the layer from the AWS Console by navigating to Serverless Application Repository and search for `aws-lambda-powertools-python-layer` and follow the deploy steps from there. - ## Tenets * **AWS Lambda only** – We optimise for AWS Lambda function environments and supported runtimes only. Utilities might work with web frameworks and non-Lambda environments, though they are not officially supported. 
From 814062e6490aa4c38a840fd515e80afbfdba18a0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Aug 2020 18:03:51 +0200 Subject: [PATCH 42/42] chore: bump to 1.4.0 Signed-off-by: heitorlessa --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e27158b968a..dbbfb6ad2b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.4.0] - 2020-08-25 + +### Added +- **All**: Official Lambda Layer via [Serverless Application Repository](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer) +- **Tracer**: `capture_method` and `capture_lambda_handler` now support **capture_response=False** parameter to prevent Tracer to capture response as metadata to allow customers running Tracer with sensitive workloads + ### Fixed - **Metrics**: Cold start metric is now completely separate from application metrics dimensions, making it easier and cheaper to visualize. 
- This is a breaking change if you were graphing/alerting on both application metrics with the same name to compensate this previous malfunctioning - Marked as bugfix as this is the intended behaviour since the beginning, as you shouldn't have the same application metric with different dimensions +- **Utilities**: SSMProvider within Parameters utility now have decrypt and recursive parameters correctly defined to support autocompletion ### Added - **Tracer**: capture_lambda_handler and capture_method decorators now support `capture_response` parameter to not include function's response as part of tracing metadata diff --git a/pyproject.toml b/pyproject.toml index 0cfd9c45bed..240ae4ed84d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.3.1" +version = "1.4.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] classifiers=[