diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 4a2bed27deb..058c8490934 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -56,6 +56,17 @@ jobs:
env:
PYPI_USERNAME: __token__
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+      - name: Publish Lambda Layer in SAR by triggering the internal CodePipeline
+ run: |
+ aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_TAG_VERSION --overwrite
+ aws codepipeline start-pipeline-execution --name ${{ secrets.CODEPIPELINE_NAME }}
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: eu-west-1
+ AWS_DEFAULT_OUTPUT: json
+
+
sync_master:
needs: upload
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ce5fba748b7..dbbfb6ad2b8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+## [1.4.0] - 2020-08-25
+
+### Added
+- **All**: Official Lambda Layer via [Serverless Application Repository](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer)
+- **Tracer**: `capture_method` and `capture_lambda_handler` now support a **capture_response=False** parameter to prevent Tracer from capturing the response as metadata, allowing customers to run Tracer with sensitive workloads
+
+### Fixed
+- **Metrics**: Cold start metric is now completely separate from application metrics dimensions, making it easier and cheaper to visualize.
+  - This is a breaking change if you were graphing/alerting on both application metrics with the same name to compensate for this previous malfunction
+  - Marked as a bugfix as this was the intended behaviour from the beginning; you shouldn't have the same application metric with different dimensions
+- **Utilities**: `SSMProvider` within the Parameters utility now has `decrypt` and `recursive` parameters correctly defined to support autocompletion
+
## [1.3.1] - 2020-08-22
### Fixed
- **Tracer**: capture_method decorator did not properly handle nested context managers
@@ -44,7 +59,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [1.0.1] - 2020-07-06
### Fixed
-- **Logger**: Fix a bug with `inject_lambda_context` causing existing an Logger keys to be overriden if `structure_logs` was called before
+- **Logger**: Fix a bug with `inject_lambda_context` causing existing Logger keys to be overridden if `structure_logs` was called before
## [1.0.0] - 2020-06-18
### Added
@@ -114,7 +129,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [0.8.0] - 2020-04-24
### Added
-- **Logger**: Introduced `Logger` class for stuctured logging as a replacement for `logger_setup`
+- **Logger**: Introduced `Logger` class for structured logging as a replacement for `logger_setup`
- **Logger**: Introduced `Logger.inject_lambda_context` decorator as a replacement for `logger_inject_lambda_context`
### Removed
diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py
index a1ffe08caf9..175097d9c10 100644
--- a/aws_lambda_powertools/metrics/base.py
+++ b/aws_lambda_powertools/metrics/base.py
@@ -177,7 +177,7 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me
if self.service and not self.dimension_set.get("service"):
self.dimension_set["service"] = self.service
- logger.debug("Serializing...", {"metrics": metrics, "dimensions": dimensions})
+ logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions})
metric_names_and_units: List[Dict[str, str]] = [] # [ { "Name": "metric_name", "Unit": "Count" } ]
metric_names_and_values: Dict[str, str] = {} # { "metric_name": 1.0 }
@@ -207,7 +207,7 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, me
}
try:
- logger.debug("Validating serialized metrics against CloudWatch EMF schema", embedded_metrics_object)
+ logger.debug("Validating serialized metrics against CloudWatch EMF schema")
fastjsonschema.validate(definition=CLOUDWATCH_EMF_SCHEMA, data=embedded_metrics_object)
except fastjsonschema.JsonSchemaException as e:
message = f"Invalid format. Error: {e.message}, Invalid item: {e.name}" # noqa: B306, E501
diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py
index 1293139afbe..4451eb2d1d0 100644
--- a/aws_lambda_powertools/metrics/metric.py
+++ b/aws_lambda_powertools/metrics/metric.py
@@ -110,8 +110,6 @@ def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = No
metric: SingleMetric = SingleMetric(namespace=namespace)
metric.add_metric(name=name, unit=unit, value=value)
yield metric
- logger.debug("Serializing single metric")
metric_set: Dict = metric.serialize_metric_set()
finally:
- logger.debug("Publishing single metric", {"metric": metric})
print(json.dumps(metric_set))
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index fe4fb559d6f..2ab6cb35b4a 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -4,7 +4,8 @@
import warnings
from typing import Any, Callable
-from .base import MetricManager
+from .base import MetricManager, MetricUnit
+from .metric import single_metric
logger = logging.getLogger(__name__)
@@ -149,7 +150,6 @@ def decorate(event, context):
else:
metrics = self.serialize_metric_set()
self.clear_metrics()
- logger.debug("Publishing metrics", {"metrics": metrics})
print(json.dumps(metrics))
return response
@@ -167,6 +167,7 @@ def __add_cold_start_metric(self, context: Any):
global is_cold_start
if is_cold_start:
logger.debug("Adding cold start metric and function_name dimension")
- self.add_metric(name="ColdStart", value=1, unit="Count")
- self.add_dimension(name="function_name", value=context.function_name)
- is_cold_start = False
+ with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric:
+ metric.add_dimension(name="function_name", value=context.function_name)
+ metric.add_dimension(name="service", value=self.service)
+ is_cold_start = False
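For illustration only (not part of this diff), a minimal sketch of how the reworked cold start handling behaves from a caller's perspective; the namespace, service, and handler names are made up:

```python
# Minimal sketch, assuming the metrics.py change above: with
# capture_cold_start_metric=True, ColdStart is now emitted via single_metric
# as its own EMF object instead of sharing the application metrics' dimensions.
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp", service="booking")  # illustrative names

@metrics.log_metrics(capture_cold_start_metric=True)
def handler(event, context):
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)

# On a cold start, two EMF JSON objects are printed to stdout:
#   1) ColdStart=1 with only function_name and service dimensions
#   2) SuccessfulBooking=1 with the application's own dimensions
```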
diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py
index 4c12be3fc26..25caacb651e 100644
--- a/aws_lambda_powertools/tracing/tracer.py
+++ b/aws_lambda_powertools/tracing/tracer.py
@@ -150,7 +150,7 @@ def __init__(
self.auto_patch = self._config["auto_patch"]
if self.disabled:
- self.__disable_tracing_provider()
+ self._disable_tracer_provider()
if self.auto_patch:
self.patch(modules=patch_modules)
@@ -158,19 +158,19 @@ def __init__(
def put_annotation(self, key: str, value: Any):
"""Adds annotation to existing segment or subsegment
+ Parameters
+ ----------
+ key : str
+ Annotation key
+ value : any
+ Value for annotation
+
Example
-------
Custom annotation for a pseudo service named payment
tracer = Tracer(service="payment")
tracer.put_annotation("PaymentStatus", "CONFIRMED")
-
- Parameters
- ----------
- key : str
- Annotation key (e.g. PaymentStatus)
- value : any
- Value for annotation (e.g. "CONFIRMED")
"""
if self.disabled:
logger.debug("Tracing has been disabled, aborting put_annotation")
@@ -226,12 +226,19 @@ def patch(self, modules: Tuple[str] = None):
else:
aws_xray_sdk.core.patch(modules)
- def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = None):
+ def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = None, capture_response: bool = True):
"""Decorator to create subsegment for lambda handlers
As Lambda follows (event, context) signature we can remove some of the boilerplate
and also capture any exception any Lambda function throws or its response as metadata
+ Parameters
+ ----------
+ lambda_handler : Callable
+ Method to annotate on
+ capture_response : bool, optional
+            Instructs tracer whether to capture the handler's response as metadata, by default True
+
Example
-------
**Lambda function using capture_lambda_handler decorator**
@@ -241,16 +248,24 @@ def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = No
def handler(event, context):
...
- Parameters
- ----------
- method : Callable
- Method to annotate on
+        **Preventing Tracer from logging response as metadata**
+
+ tracer = Tracer(service="payment")
+ @tracer.capture_lambda_handler(capture_response=False)
+ def handler(event, context):
+ ...
Raises
------
err
Exception raised by method
"""
+ # If handler is None we've been called with parameters
+ # Return a partial function with args filled
+ if lambda_handler is None:
+ logger.debug("Decorator called with parameters")
+ return functools.partial(self.capture_lambda_handler, capture_response=capture_response)
+
lambda_handler_name = lambda_handler.__name__
@functools.wraps(lambda_handler)
@@ -266,14 +281,16 @@ def decorate(event, context):
logger.debug("Calling lambda handler")
response = lambda_handler(event, context)
logger.debug("Received lambda handler response successfully")
- logger.debug(response)
self._add_response_as_metadata(
- function_name=lambda_handler_name, data=response, subsegment=subsegment
+ method_name=lambda_handler_name,
+ data=response,
+ subsegment=subsegment,
+ capture_response=capture_response,
)
except Exception as err:
logger.exception(f"Exception received from {lambda_handler_name}")
self._add_full_exception_as_metadata(
- function_name=lambda_handler_name, error=err, subsegment=subsegment
+ method_name=lambda_handler_name, error=err, subsegment=subsegment
)
raise
@@ -281,7 +298,7 @@ def decorate(event, context):
return decorate
- def capture_method(self, method: Callable = None):
+ def capture_method(self, method: Callable = None, capture_response: bool = True):
"""Decorator to create subsegment for arbitrary functions
It also captures both response and exceptions as metadata
@@ -295,6 +312,13 @@ def capture_method(self, method: Callable = None):
`async.gather` is called, or use `in_subsegment_async`
context manager via our escape hatch mechanism - See examples.
+ Parameters
+ ----------
+ method : Callable
+ Method to annotate on
+ capture_response : bool, optional
+            Instructs tracer whether to capture the method's response as metadata, by default True
+
Example
-------
**Custom function using capture_method decorator**
@@ -416,69 +440,84 @@ async def async_tasks():
return { "task": "done", **ret }
- Parameters
- ----------
- method : Callable
- Method to annotate on
-
Raises
------
err
Exception raised by method
"""
+ # If method is None we've been called with parameters
+ # Return a partial function with args filled
+ if method is None:
+ logger.debug("Decorator called with parameters")
+ return functools.partial(self.capture_method, capture_response=capture_response)
+
+ method_name = f"{method.__name__}"
if inspect.iscoroutinefunction(method):
- decorate = self._decorate_async_function(method=method)
+ decorate = self._decorate_async_function(
+ method=method, capture_response=capture_response, method_name=method_name
+ )
elif inspect.isgeneratorfunction(method):
- decorate = self._decorate_generator_function(method=method)
+ decorate = self._decorate_generator_function(
+ method=method, capture_response=capture_response, method_name=method_name
+ )
elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__):
- decorate = self._decorate_generator_function_with_context_manager(method=method)
+ decorate = self._decorate_generator_function_with_context_manager(
+ method=method, capture_response=capture_response, method_name=method_name
+ )
else:
- decorate = self._decorate_sync_function(method=method)
+ decorate = self._decorate_sync_function(
+ method=method, capture_response=capture_response, method_name=method_name
+ )
return decorate
- def _decorate_async_function(self, method: Callable = None):
- method_name = f"{method.__name__}"
-
+ def _decorate_async_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None):
@functools.wraps(method)
async def decorate(*args, **kwargs):
async with self.provider.in_subsegment_async(name=f"## {method_name}") as subsegment:
try:
logger.debug(f"Calling method: {method_name}")
response = await method(*args, **kwargs)
- self._add_response_as_metadata(function_name=method_name, data=response, subsegment=subsegment)
+ self._add_response_as_metadata(
+ method_name=method_name,
+ data=response,
+ subsegment=subsegment,
+ capture_response=capture_response,
+ )
except Exception as err:
logger.exception(f"Exception received from '{method_name}' method")
- self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment)
+ self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment)
raise
return response
return decorate
- def _decorate_generator_function(self, method: Callable = None):
- method_name = f"{method.__name__}"
-
+ def _decorate_generator_function(
+ self, method: Callable = None, capture_response: bool = True, method_name: str = None
+ ):
@functools.wraps(method)
def decorate(*args, **kwargs):
with self.provider.in_subsegment(name=f"## {method_name}") as subsegment:
try:
logger.debug(f"Calling method: {method_name}")
result = yield from method(*args, **kwargs)
- self._add_response_as_metadata(function_name=method_name, data=result, subsegment=subsegment)
+ self._add_response_as_metadata(
+ method_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response
+ )
except Exception as err:
logger.exception(f"Exception received from '{method_name}' method")
- self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment)
+ self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment)
raise
return result
return decorate
- def _decorate_generator_function_with_context_manager(self, method: Callable = None):
- method_name = f"{method.__name__}"
-
+ def _decorate_generator_function_with_context_manager(
+ self, method: Callable = None, capture_response: bool = True, method_name: str = None
+ ):
@functools.wraps(method)
@contextlib.contextmanager
def decorate(*args, **kwargs):
@@ -488,27 +527,32 @@ def decorate(*args, **kwargs):
with method(*args, **kwargs) as return_val:
result = return_val
yield result
- self._add_response_as_metadata(function_name=method_name, data=result, subsegment=subsegment)
+ self._add_response_as_metadata(
+ method_name=method_name, data=result, subsegment=subsegment, capture_response=capture_response
+ )
except Exception as err:
logger.exception(f"Exception received from '{method_name}' method")
- self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment)
+ self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment)
raise
return decorate
- def _decorate_sync_function(self, method: Callable = None):
- method_name = f"{method.__name__}"
-
+ def _decorate_sync_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None):
@functools.wraps(method)
def decorate(*args, **kwargs):
with self.provider.in_subsegment(name=f"## {method_name}") as subsegment:
try:
logger.debug(f"Calling method: {method_name}")
response = method(*args, **kwargs)
- self._add_response_as_metadata(function_name=method_name, data=response, subsegment=subsegment)
+ self._add_response_as_metadata(
+ method_name=method_name,
+ data=response,
+ subsegment=subsegment,
+ capture_response=capture_response,
+ )
except Exception as err:
logger.exception(f"Exception received from '{method_name}' method")
- self._add_full_exception_as_metadata(function_name=method_name, error=err, subsegment=subsegment)
+ self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment)
raise
return response
@@ -516,46 +560,54 @@ def decorate(*args, **kwargs):
return decorate
def _add_response_as_metadata(
- self, function_name: str = None, data: Any = None, subsegment: aws_xray_sdk.core.models.subsegment = None
+ self,
+ method_name: str = None,
+ data: Any = None,
+ subsegment: aws_xray_sdk.core.models.subsegment = None,
+ capture_response: bool = True,
):
"""Add response as metadata for given subsegment
Parameters
----------
- function_name : str, optional
- function name to add as metadata key, by default None
+ method_name : str, optional
+ method name to add as metadata key, by default None
data : Any, optional
data to add as subsegment metadata, by default None
subsegment : aws_xray_sdk.core.models.subsegment, optional
existing subsegment to add metadata on, by default None
+ capture_response : bool, optional
+            Whether to include the response as metadata, by default True
"""
- if data is None or subsegment is None:
+ if data is None or not capture_response or subsegment is None:
return
- subsegment.put_metadata(key=f"{function_name} response", value=data, namespace=self._config["service"])
+ subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self._config["service"])
def _add_full_exception_as_metadata(
- self, function_name: str = None, error: Exception = None, subsegment: aws_xray_sdk.core.models.subsegment = None
+ self, method_name: str = None, error: Exception = None, subsegment: aws_xray_sdk.core.models.subsegment = None
):
"""Add full exception object as metadata for given subsegment
Parameters
----------
- function_name : str, optional
- function name to add as metadata key, by default None
+ method_name : str, optional
+ method name to add as metadata key, by default None
error : Exception, optional
error to add as subsegment metadata, by default None
subsegment : aws_xray_sdk.core.models.subsegment, optional
existing subsegment to add metadata on, by default None
"""
- subsegment.put_metadata(key=f"{function_name} error", value=error, namespace=self._config["service"])
+ subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self._config["service"])
- def __disable_tracing_provider(self):
+ @staticmethod
+ def _disable_tracer_provider():
"""Forcefully disables tracing"""
logger.debug("Disabling tracer provider...")
aws_xray_sdk.global_sdk_config.set_sdk_enabled(False)
- def __is_trace_disabled(self) -> bool:
+ @staticmethod
+ def _is_tracer_disabled() -> bool:
"""Detects whether trace has been disabled
Tracing is automatically disabled in the following conditions:
@@ -592,7 +644,7 @@ def __build_config(
provider: aws_xray_sdk.core.xray_recorder = None,
):
""" Populates Tracer config for new and existing initializations """
- is_disabled = disabled if disabled is not None else self.__is_trace_disabled()
+ is_disabled = disabled if disabled is not None else self._is_tracer_disabled()
is_service = service if service is not None else os.getenv("POWERTOOLS_SERVICE_NAME")
self._config["provider"] = provider if provider is not None else self._config["provider"]
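As a side note, a small sketch of the decorator pattern the two `functools.partial` early-returns above enable — both spellings now work; the service and function names below are illustrative:

```python
# Sketch of the optional-arguments decorator pattern used above. Calling
# @tracer.capture_method(capture_response=False) invokes capture_method with
# method=None, so it returns a partial of itself with capture_response bound;
# Python then applies that partial to the decorated function.
from aws_lambda_powertools import Tracer

tracer = Tracer(service="payment")  # illustrative service name

@tracer.capture_method  # bare form: the function is passed directly
def collect_payment(charge_id):
    ...

@tracer.capture_method(capture_response=False)  # parameterized form
def fetch_sensitive_record(record_id):
    ...
```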
diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py
index 8a552b53bcb..274cd96aace 100644
--- a/aws_lambda_powertools/utilities/parameters/base.py
+++ b/aws_lambda_powertools/utilities/parameters/base.py
@@ -7,7 +7,7 @@
from abc import ABC, abstractmethod
from collections import namedtuple
from datetime import datetime, timedelta
-from typing import Dict, Optional, Union
+from typing import Dict, Optional, Tuple, Union
from .exceptions import GetParameterError, TransformParameterError
@@ -31,6 +31,9 @@ def __init__(self):
self.store = {}
+ def _has_not_expired(self, key: Tuple[str, Optional[str]]) -> bool:
+ return key in self.store and self.store[key].ttl >= datetime.now()
+
def get(
self, name: str, max_age: int = DEFAULT_MAX_AGE_SECS, transform: Optional[str] = None, **sdk_options
) -> Union[str, list, dict, bytes]:
@@ -70,24 +73,26 @@ def get(
# an acceptable tradeoff.
key = (name, transform)
- if key not in self.store or self.store[key].ttl < datetime.now():
- try:
- value = self._get(name, **sdk_options)
- # Encapsulate all errors into a generic GetParameterError
- except Exception as exc:
- raise GetParameterError(str(exc))
+ if self._has_not_expired(key):
+ return self.store[key].value
+
+ try:
+ value = self._get(name, **sdk_options)
+ # Encapsulate all errors into a generic GetParameterError
+ except Exception as exc:
+ raise GetParameterError(str(exc))
- if transform is not None:
- value = transform_value(value, transform)
+ if transform is not None:
+ value = transform_value(value, transform)
- self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age),)
+ self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age),)
- return self.store[key].value
+ return value
@abstractmethod
def _get(self, name: str, **sdk_options) -> str:
"""
- Retrieve paramater value from the underlying parameter store
+ Retrieve parameter value from the underlying parameter store
"""
raise NotImplementedError()
@@ -129,29 +134,22 @@ def get_multiple(
key = (path, transform)
- if key not in self.store or self.store[key].ttl < datetime.now():
- try:
- values = self._get_multiple(path, **sdk_options)
- # Encapsulate all errors into a generic GetParameterError
- except Exception as exc:
- raise GetParameterError(str(exc))
+ if self._has_not_expired(key):
+ return self.store[key].value
- if transform is not None:
- new_values = {}
- for key, value in values.items():
- try:
- new_values[key] = transform_value(value, transform)
- except Exception as exc:
- if raise_on_transform_error:
- raise exc
- else:
- new_values[key] = None
+ try:
+ values: Dict[str, Union[str, bytes, dict, None]] = self._get_multiple(path, **sdk_options)
+ # Encapsulate all errors into a generic GetParameterError
+ except Exception as exc:
+ raise GetParameterError(str(exc))
- values = new_values
+ if transform is not None:
+            for (item, value) in values.items():  # item, not key: avoid clobbering the cache key
+                values[item] = transform_value(value, transform, raise_on_transform_error)
- self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age),)
+ self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age),)
- return self.store[key].value
+ return values
@abstractmethod
def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
@@ -161,16 +159,19 @@ def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
raise NotImplementedError()
-def transform_value(value: str, transform: str) -> Union[dict, bytes]:
+def transform_value(value: str, transform: str, raise_on_transform_error: bool = True) -> Union[dict, bytes, None]:
"""
Apply a transform to a value
Parameters
---------
value: str
- Parameter alue to transform
+ Parameter value to transform
transform: str
Type of transform, supported values are "json" and "binary"
+ raise_on_transform_error: bool, optional
+ Raises an exception if any transform fails, otherwise this will
+ return a None value for each transform that failed
Raises
------
@@ -187,4 +188,6 @@ def transform_value(value: str, transform: str) -> Union[dict, bytes]:
raise ValueError(f"Invalid transform type '{transform}'")
except Exception as exc:
- raise TransformParameterError(str(exc))
+ if raise_on_transform_error:
+ raise TransformParameterError(str(exc))
+ return None
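To make the new `raise_on_transform_error` flag concrete, a quick sketch (values are illustrative):

```python
# Sketch, assuming the transform_value change above: failed transforms now
# return None instead of raising when raise_on_transform_error is False.
from aws_lambda_powertools.utilities.parameters.base import transform_value
from aws_lambda_powertools.utilities.parameters.exceptions import TransformParameterError

assert transform_value('{"a": 1}', "json") == {"a": 1}
assert transform_value("not json", "json", raise_on_transform_error=False) is None

try:
    transform_value("not json", "json")  # default behaviour still raises
except TransformParameterError as exc:
    print(f"transform failed: {exc}")
```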
diff --git a/aws_lambda_powertools/utilities/parameters/secrets.py b/aws_lambda_powertools/utilities/parameters/secrets.py
index ee4585309fe..67cb94c340b 100644
--- a/aws_lambda_powertools/utilities/parameters/secrets.py
+++ b/aws_lambda_powertools/utilities/parameters/secrets.py
@@ -77,7 +77,7 @@ def _get(self, name: str, **sdk_options) -> str:
----------
name: str
Name of the parameter
- sdk_options: dict
+ sdk_options: dict, optional
Dictionary of options that will be passed to the Secrets Manager get_secret_value API call
"""
diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py
index b458f8690d0..0f39bfac9c0 100644
--- a/aws_lambda_powertools/utilities/parameters/ssm.py
+++ b/aws_lambda_powertools/utilities/parameters/ssm.py
@@ -8,7 +8,7 @@
import boto3
from botocore.config import Config
-from .base import DEFAULT_PROVIDERS, BaseProvider
+from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider
class SSMProvider(BaseProvider):
@@ -86,6 +86,46 @@ def __init__(
super().__init__()
+ def get(
+ self,
+ name: str,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ transform: Optional[str] = None,
+ decrypt: bool = False,
+ **sdk_options
+ ) -> Union[str, list, dict, bytes]:
+ """
+ Retrieve a parameter value or return the cached value
+
+ Parameters
+ ----------
+ name: str
+ Parameter name
+ max_age: int
+ Maximum age of the cached value
+ transform: str
+ Optional transformation of the parameter value. Supported values
+ are "json" for JSON strings and "binary" for base 64 encoded
+ values.
+ decrypt: bool, optional
+ If the parameter value should be decrypted
+ sdk_options: dict, optional
+ Arguments that will be passed directly to the underlying API call
+
+ Raises
+ ------
+ GetParameterError
+ When the parameter provider fails to retrieve a parameter value for
+ a given name.
+ TransformParameterError
+ When the parameter provider fails to transform a parameter value.
+ """
+
+        # Add `decrypt` to sdk_options so we can have an explicit option for this
+ sdk_options["decrypt"] = decrypt
+
+ return super().get(name, max_age, transform, **sdk_options)
+
def _get(self, name: str, decrypt: bool = False, **sdk_options) -> str:
"""
Retrieve a parameter value from AWS Systems Manager Parameter Store
@@ -144,7 +184,9 @@ def _get_multiple(self, path: str, decrypt: bool = False, recursive: bool = Fals
return parameters
-def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) -> Union[str, list, dict, bytes]:
+def get_parameter(
+ name: str, transform: Optional[str] = None, decrypt: bool = False, **sdk_options
+) -> Union[str, list, dict, bytes]:
"""
Retrieve a parameter value from AWS Systems Manager (SSM) Parameter Store
@@ -154,6 +196,8 @@ def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) ->
Name of the parameter
transform: str, optional
Transforms the content from a JSON object ('json') or base64 binary string ('binary')
+ decrypt: bool, optional
+ If the parameter values should be decrypted
sdk_options: dict, optional
Dictionary of options that will be passed to the Parameter Store get_parameter API call
@@ -190,7 +234,10 @@ def get_parameter(name: str, transform: Optional[str] = None, **sdk_options) ->
if "ssm" not in DEFAULT_PROVIDERS:
DEFAULT_PROVIDERS["ssm"] = SSMProvider()
- return DEFAULT_PROVIDERS["ssm"].get(name, transform=transform)
+    # Add `decrypt` to sdk_options so we can have an explicit option for this
+ sdk_options["decrypt"] = decrypt
+
+ return DEFAULT_PROVIDERS["ssm"].get(name, transform=transform, **sdk_options)
def get_parameters(
@@ -205,10 +252,10 @@ def get_parameters(
Path to retrieve the parameters
transform: str, optional
Transforms the content from a JSON object ('json') or base64 binary string ('binary')
- decrypt: bool, optional
- If the parameter values should be decrypted
recursive: bool, optional
If this should retrieve the parameter values recursively or not, defaults to True
+ decrypt: bool, optional
+ If the parameter values should be decrypted
sdk_options: dict, optional
Dictionary of options that will be passed to the Parameter Store get_parameters_by_path API call
@@ -245,4 +292,7 @@ def get_parameters(
if "ssm" not in DEFAULT_PROVIDERS:
DEFAULT_PROVIDERS["ssm"] = SSMProvider()
- return DEFAULT_PROVIDERS["ssm"].get_multiple(path, transform=transform, recursive=recursive, decrypt=decrypt)
+ sdk_options["recursive"] = recursive
+ sdk_options["decrypt"] = decrypt
+
+ return DEFAULT_PROVIDERS["ssm"].get_multiple(path, transform=transform, **sdk_options)
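A brief usage sketch of the now-explicit options (parameter names and paths are made up):

```python
# Sketch, assuming the ssm.py changes above: decrypt and recursive are now
# explicit keyword arguments that get folded into sdk_options for the provider.
from aws_lambda_powertools.utilities import parameters

db_password = parameters.get_parameter("/my-app/db-password", decrypt=True)
app_config = parameters.get_parameters("/my-app/config", recursive=True, decrypt=False)

# The provider-level API gains the same explicit option, plus caching control
ssm_provider = parameters.SSMProvider()
db_password = ssm_provider.get("/my-app/db-password", max_age=60, decrypt=True)
```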
diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx
index 225fb401cf1..618d7a78435 100644
--- a/docs/content/core/logger.mdx
+++ b/docs/content/core/logger.mdx
@@ -58,7 +58,7 @@ Key | Type | Example | Description
**sampling_rate** | int | 0.1 | Debug logging sampling rate in percentage e.g. 1% in this case
**message** | any | "Collecting payment" | Log statement value. Unserializable JSON values will be casted to string
-## Capturing context Lambda info
+## Capturing Lambda context info
You can enrich your structured logs with key Lambda context information via `inject_lambda_context`.
@@ -79,22 +79,6 @@ def handler(event, context):
...
```
-You can also explicitly log any incoming event using `log_event` param or via `POWERTOOLS_LOGGER_LOG_EVENT` env var.
-
-
- This is disabled by default to prevent sensitive info being logged.
-
-
-```python:title=log_handler_event.py
-from aws_lambda_powertools import Logger
-
-logger = Logger()
-
-@logger.inject_lambda_context(log_event=True) # highlight-start
-def handler(event, context):
- ...
-```
-
When used, this will include the following keys:
Key | Type | Example
@@ -145,6 +129,23 @@ Key | Type | Example
}
```
+
+
+You can also explicitly log any incoming event using `log_event` param or via `POWERTOOLS_LOGGER_LOG_EVENT` env var.
+
+
+ This is disabled by default to prevent sensitive info being logged.
+
+
+```python:title=log_handler_event.py
+from aws_lambda_powertools import Logger
+
+logger = Logger()
+
+@logger.inject_lambda_context(log_event=True) # highlight-line
+def handler(event, context):
+ ...
+```
## Appending additional keys
@@ -222,7 +223,7 @@ If you ever forget to use `child` param, we will return an existing `Logger` wit
You can dynamically set a percentage of your logs to **DEBUG** level using `sample_rate` param or via env var `POWERTOOLS_LOGGER_SAMPLE_RATE`.
-This happens on an entire request basis, and DEBUG level is set at the constructor. That means, concurrent requests or infrequent invocations are more likely to occur as [new Lambda execution contexts are created](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), not reused.
+Sampling calculation happens during Logger class initialization. This means sampling is more likely to happen during concurrent requests or infrequent invocations, as [new Lambda execution contexts are created](https://docs.aws.amazon.com/lambda/latest/dg/runtimes-context.html), not reused.
If you want this logic to happen on every invocation regardless whether Lambda reuses the execution environment or not, then create your Logger inside your Lambda handler.
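For clarity, a small sketch of the per-invocation alternative mentioned above (service name and rate are illustrative):

```python
# Sketch: since sampling is computed when Logger is initialized, constructing
# it inside the handler re-runs the sampling decision on every invocation.
from aws_lambda_powertools import Logger

def handler(event, context):
    logger = Logger(service="payment", sample_rate=0.1)  # illustrative values
    logger.debug("Emitted for roughly 10% of invocations")
```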
diff --git a/docs/content/core/metrics.mdx b/docs/content/core/metrics.mdx
index 7265d9b8a50..9b750c28622 100644
--- a/docs/content/core/metrics.mdx
+++ b/docs/content/core/metrics.mdx
@@ -5,7 +5,9 @@ description: Core utility
import Note from "../../src/components/Note"
-Metrics creates custom metrics asynchronously via logging metrics to standard output following Amazon CloudWatch Embedded Metric Format (EMF).
+Metrics creates custom metrics asynchronously by logging metrics to standard output following Amazon CloudWatch Embedded Metric Format (EMF).
+
+These metrics can be visualized through [Amazon CloudWatch Console](https://console.aws.amazon.com/cloudwatch/).
**Key features**
@@ -32,7 +34,9 @@ Resources:
```
We recommend you use your application or main service as a metric namespace.
-You can explicitly set a namespace name via `namespace` param or via `POWERTOOLS_METRICS_NAMESPACE` env var. This sets **namespace** key that will be used for all metrics.
+You can explicitly set a namespace name via `namespace` param or via `POWERTOOLS_METRICS_NAMESPACE` env var.
+
+This sets the **namespace** key that will be used for all metrics.
You can also pass a service name via `service` param or `POWERTOOLS_SERVICE_NAME` env var. This will create a dimension with the service name.
```python:title=app.py
@@ -67,7 +71,7 @@ metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
`MetricUnit` enum facilitate finding a supported metric unit by CloudWatch. Alternatively, you can pass the value as a string if you already know them e.g. "Count".
-CloudWatch EMF supports a max of 100 metrics. Metrics will automatically flush all metrics when adding the 100th metric, where subsequent metrics will be aggregated into a new EMF object.
+CloudWatch EMF supports a max of 100 metrics per EMF object. The Metrics utility will automatically flush all metrics when adding the 100th metric; subsequent metrics will be aggregated into a new EMF object.
## Creating a metric with a different dimension
@@ -238,7 +242,12 @@ def lambda_handler(evt, ctx):
...
```
-If it's a cold start, this feature will add a metric named `ColdStart` and a dimension named `function_name`.
+If it's a cold start invocation, this feature will:
+
+* Create a separate EMF blob solely containing a metric named `ColdStart`
+* Add `function_name` and `service` dimensions
+
+This has the advantage of keeping the cold start metric separate from your application metrics.
## Testing your code
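And a rough sketch of the 100-metric flush described above (metric names and counts are illustrative):

```python
# Sketch, assuming the flush-at-100 behaviour documented above: EMF caps each
# object at 100 metrics, so the utility serializes and prints the first 100,
# then starts aggregating subsequent metrics into a fresh EMF object.
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp", service="booking")

for i in range(120):
    metrics.add_metric(name=f"Operation{i}", unit=MetricUnit.Count, value=1)
# metrics 1-100 were flushed as one EMF object on stdout;
# the remaining 20 are buffered into a new EMF object
```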
diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx
index 677ad4ccae0..4042d51a861 100644
--- a/docs/content/core/tracer.mdx
+++ b/docs/content/core/tracer.mdx
@@ -52,7 +52,7 @@ You can trace your Lambda function handler via `capture_lambda_handler`.
When using this decorator, Tracer performs these additional tasks to ease operations:
* Creates a `ColdStart` annotation to easily filter traces that have had an initialization overhead
-* Adds any response, or full exceptions generated by the handler as metadata
+* Captures any response, or full exceptions generated by the handler, and includes them as tracing metadata
```python:title=lambda_handler.py
from aws_lambda_powertools import Tracer
@@ -65,6 +65,20 @@ def handler(event, context):
...
```
+
+ Returning sensitive information from your Lambda handler or functions, where Tracer is used?
+
+  You can disable Tracer from capturing their responses as tracing metadata with the capture_response=False parameter in both capture_lambda_handler and capture_method decorators.
+
+
+```python:title=do_not_capture_response_as_metadata.py
+# Disables Tracer from capturing response and adding as metadata
+# Useful when dealing with sensitive data
+@tracer.capture_lambda_handler(capture_response=False) # highlight-line
+def handler(event, context):
+ return "sensitive_information"
+```
+
### Annotations
Annotations are key-values indexed by AWS X-Ray on a per trace basis. You can use them to filter traces as well as to create [Trace Groups](https://aws.amazon.com/about-aws/whats-new/2018/11/aws-xray-adds-the-ability-to-group-traces/).
@@ -108,7 +122,10 @@ def collect_payment(charge_id):
ret = requests.post(PAYMENT_ENDPOINT) # logic
tracer.put_annotation("PAYMENT_STATUS", "SUCCESS") # custom annotation
return ret
-...
+
+@tracer.capture_method(capture_response=False) # highlight-line
+def sensitive_information_to_be_processed():
+ return "sensitive_information"
```
## Asynchronous and generator functions
@@ -157,7 +174,23 @@ def handler(evt, ctx): # highlight-line
another_result = list(collect_payment_gen())
```
-## Tracing aiohttp requests
+## Patching modules
+
+Tracer automatically patches all [libraries supported by X-Ray](https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-patching.html) during initialization, by default. Under the hood, the AWS X-Ray SDK checks whether a supported library has been imported before patching it.
+
+If you're looking to shave a few microseconds, or milliseconds depending on your function memory configuration, you can patch specific modules using `patch_modules` param:
+
+```python:title=app.py
+import boto3
+import requests
+
+from aws_lambda_powertools import Tracer
+
+modules_to_be_patched = ["boto3", "requests"]
+tracer = Tracer(patch_modules=modules_to_be_patched) # highlight-line
+```
+
+### Tracing aiohttp requests
This snippet assumes you have aiohttp as a dependency
@@ -186,7 +219,7 @@ You can use `tracer.provider` attribute to access all methods provided by AWS X-
This is useful when you need a feature available in X-Ray that is not available in the Tracer utility, for example [thread-safe](https://github.com/aws/aws-xray-sdk-python/#user-content-trace-threadpoolexecutor), or [context managers](https://github.com/aws/aws-xray-sdk-python/#user-content-start-a-custom-segmentsubsegment).
-## Concurrent asynchronous functions
+### Concurrent asynchronous functions
As of now, X-Ray SDK will raise an exception when async functions are run and traced concurrently.
diff --git a/docs/content/index.mdx b/docs/content/index.mdx
index e5c2688ecc7..26ab367ba4c 100644
--- a/docs/content/index.mdx
+++ b/docs/content/index.mdx
@@ -3,8 +3,15 @@ title: Homepage
description: AWS Lambda Powertools Python
---
+import Note from "../src/components/Note"
+
Powertools is a suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier.
+
+  Looking for a quick run-through of the core utilities?
+ Check out this detailed blog post with a practical example.
+
+
## Install
Powertools is available in PyPi. You can use your favourite dependency management tool to install it
@@ -12,24 +19,40 @@ Powertools is available in PyPi. You can use your favourite dependency managemen
* [poetry](https://python-poetry.org/): `poetry add aws-lambda-powertools`
* [pip](https://pip.pypa.io/en/latest/index.html): `pip install aws-lambda-powertools`
-## Features
+**Quick hello world example using SAM CLI**
-* [Tracing](./core/tracer) - Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions
-* [Logging](./core/logger) - Structured logging made easier, and decorator to enrich structured logging with key Lambda context details
-* [Metrics](./core/metrics) - Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF)
-* [Bring your own middleware](./utilities/middleware_factory) - Decorator factory to create your own middleware to run logic before, and after each Lambda invocation
-* [Parameters utility](./utilities/parameters) - Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time
+```bash:title=hello_world.sh
+sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python
+```
-## Tenets
+### Lambda Layer
-* **AWS Lambda only** – We optimise for AWS Lambda function environments and supported runtimes only. Utilities might work with web frameworks and non-Lambda environments, though they are not officially supported.
-* **Eases the adoption of best practices** – The main priority of the utilities is to facilitate best practices adoption, as defined in the AWS Well-Architected Serverless Lens; all other functionality is optional.
-* **Keep it lean** – Additional dependencies are carefully considered for security and ease of maintenance, and prevent negatively impacting startup time.
-* **We strive for backwards compatibility** – New features and changes should keep backwards compatibility. If a breaking change cannot be avoided, the deprecation and migration process should be clearly defined.
-* **We work backwards from the community** – We aim to strike a balance of what would work best for 80% of customers. Emerging practices are considered and discussed via Requests for Comment (RFCs)
-* **Idiomatic** – Utilities follow programming language idioms and language-specific best practices.
+Powertools is also available as a Lambda Layer. It is distributed via the [AWS Serverless Application Repository (SAR)](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html).
-_`*` Core utilities are Tracer, Logger and Metrics. Optional utilities may vary across languages._
+App | ARN
+----------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------
+[aws-lambda-powertools-python-layer](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer) | arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer
+
+If using SAM, you can include this SAR App as part of your shared Layers stack, and lock to a specific semantic version. Once deployed, it'll be available across the account it's deployed to.
+
+```yaml
+ AwsLambdaPowertoolsPythonLayer:
+ Type: AWS::Serverless::Application
+ Properties:
+ Location:
+ ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer
+ SemanticVersion: 1.3.1 # change to latest semantic version available in SAR
+```
+
+## Features
+
+Utility | Description
+------------------------------------------------- | ---------------------------------------------------------------------------------
+[Tracing](./core/tracer) | Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions
+[Logging](./core/logger) | Structured logging made easier, and decorator to enrich structured logging with key Lambda context details
+[Metrics](./core/metrics) | Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF)
+[Bring your own middleware](./utilities/middleware_factory) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation
+[Parameters utility](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time
## Environment variables
@@ -37,10 +60,10 @@ _`*` Core utilities are Tracer, Logger and Metrics. Optional utilities may vary
Environment variable | Description | Utility
------------------------------------------------- | --------------------------------------------------------------------------------- | -------------------------------------------------
-**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | all
+**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All
**POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics)
**POWERTOOLS_TRACE_DISABLED** | Disables tracing | [Tracing](./core/tracer)
-**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [middleware_factory](./utilities/middleware_factory)
+**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory)
**POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger)
**POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger)
**LOG_LEVEL** | Sets logging level | [Logging](./core/logger)
@@ -54,3 +77,14 @@ from aws_lambda_powertools.logging.logger import set_package_logger
set_package_logger()
```
+
+## Tenets
+
+* **AWS Lambda only** – We optimise for AWS Lambda function environments and supported runtimes only. Utilities might work with web frameworks and non-Lambda environments, though they are not officially supported.
+* **Eases the adoption of best practices** – The main priority of the utilities is to facilitate best practices adoption, as defined in the AWS Well-Architected Serverless Lens; all other functionality is optional.
+* **Keep it lean** – Additional dependencies are carefully considered for security and ease of maintenance, and prevent negatively impacting startup time.
+* **We strive for backwards compatibility** – New features and changes should keep backwards compatibility. If a breaking change cannot be avoided, the deprecation and migration process should be clearly defined.
+* **We work backwards from the community** – We aim to strike a balance of what would work best for 80% of customers. Emerging practices are considered and discussed via Requests for Comment (RFCs)
+* **Idiomatic** – Utilities follow programming language idioms and language-specific best practices.
+
+_`*` Core utilities are Tracer, Logger and Metrics. Optional utilities may vary across languages._
diff --git a/docs/package-lock.json b/docs/package-lock.json
index 4ca6ae53d77..107a07f81a8 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -6949,9 +6949,9 @@
"integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4="
},
"dot-prop": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz",
- "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz",
+ "integrity": "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==",
"requires": {
"is-obj": "^1.0.0"
}
@@ -18247,9 +18247,12 @@
}
},
"serialize-javascript": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-2.1.2.tgz",
- "integrity": "sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ=="
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz",
+ "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==",
+ "requires": {
+ "randombytes": "^2.1.0"
+ }
},
"serve-index": {
"version": "1.9.1",
@@ -19666,15 +19669,15 @@
}
},
"terser-webpack-plugin": {
- "version": "1.4.3",
- "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz",
- "integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==",
+ "version": "1.4.5",
+ "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz",
+ "integrity": "sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==",
"requires": {
"cacache": "^12.0.2",
"find-cache-dir": "^2.1.0",
"is-wsl": "^1.1.0",
"schema-utils": "^1.0.0",
- "serialize-javascript": "^2.1.2",
+ "serialize-javascript": "^4.0.0",
"source-map": "^0.6.1",
"terser": "^4.1.2",
"webpack-sources": "^1.4.0",
@@ -20365,9 +20368,9 @@
"integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4="
},
"dot-prop": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz",
- "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz",
+ "integrity": "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==",
"requires": {
"is-obj": "^1.0.0"
}
diff --git a/pyproject.toml b/pyproject.toml
index 0cfd9c45bed..240ae4ed84d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "1.3.1"
+version = "1.4.0"
description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric"
authors = ["Amazon Web Services"]
classifiers=[
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 3407441a7bc..4d092131988 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -7,6 +7,7 @@
from aws_lambda_powertools import Metrics, single_metric
from aws_lambda_powertools.metrics import MetricUnit, MetricUnitError, MetricValueError, SchemaValidationError
+from aws_lambda_powertools.metrics import metrics as metrics_global
from aws_lambda_powertools.metrics.base import MetricManager
@@ -14,6 +15,7 @@
def reset_metric_set():
metrics = Metrics()
metrics.clear_metrics()
+ metrics_global.is_cold_start = True # ensure each test has cold start
yield
@@ -112,6 +114,10 @@ def capture_metrics_output(capsys):
return json.loads(capsys.readouterr().out.strip())
+def capture_metrics_output_multiple_emf_objects(capsys):
+ return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line]
+
+
def test_single_metric_logs_one_metric_only(capsys, metric, dimension, namespace):
# GIVEN we try adding more than one metric
# WHEN using single_metric context manager
@@ -495,7 +501,7 @@ def lambda_handler(evt, context):
LambdaContext = namedtuple("LambdaContext", "function_name")
lambda_handler({}, LambdaContext("example_fn"))
- _ = capture_metrics_output(capsys) # ignore first stdout captured
+ _, _ = capture_metrics_output_multiple_emf_objects(capsys) # ignore first stdout captured
# THEN ColdStart metric and function_name dimension should be logged once
lambda_handler({}, LambdaContext("example_fn"))
@@ -630,3 +636,38 @@ def test_serialize_metric_set_metric_definition(metric, dimension, namespace, se
assert "Timestamp" in metric_definition_output["_aws"]
remove_timestamp(metrics=[metric_definition_output, expected_metric_definition])
assert metric_definition_output == expected_metric_definition
+
+
+def test_log_metrics_capture_cold_start_metric_separately(capsys, namespace, service, metric, dimension):
+ # GIVEN Metrics is initialized
+ my_metrics = Metrics(service=service, namespace=namespace)
+
+ # WHEN log_metrics is used with capture_cold_start_metric
+ @my_metrics.log_metrics(capture_cold_start_metric=True)
+ def lambda_handler(evt, context):
+ my_metrics.add_metric(**metric)
+ my_metrics.add_dimension(**dimension)
+
+ LambdaContext = namedtuple("LambdaContext", "function_name")
+ lambda_handler({}, LambdaContext("example_fn"))
+
+ cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys)
+
+ # THEN ColdStart metric and function_name dimension should be logged
+ # in a separate EMF blob than the application metrics
+ assert cold_start_blob["ColdStart"] == 1
+ assert cold_start_blob["function_name"] == "example_fn"
+ assert cold_start_blob["service"] == service
+
+ # and that application metrics dimensions are not part of ColdStart EMF blob
+ assert "test_dimension" not in cold_start_blob
+
+ # THEN application metrics EMF blob should not have
+ # ColdStart metric nor function_name dimension
+ assert "function_name" not in custom_metrics_blob
+ assert "ColdStart" not in custom_metrics_blob
+
+ # and that application metrics are recorded as normal
+ assert custom_metrics_blob["service"] == service
+ assert custom_metrics_blob["single_metric"] == metric["value"]
+ assert custom_metrics_blob["test_dimension"] == dimension["value"]
diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py
index 7a0677b2197..abd121540a6 100644
--- a/tests/functional/test_utilities_parameters.py
+++ b/tests/functional/test_utilities_parameters.py
@@ -1310,6 +1310,7 @@ def test_get_parameter_new(monkeypatch, mock_name, mock_value):
class TestProvider(BaseProvider):
def _get(self, name: str, **kwargs) -> str:
assert name == mock_name
+ assert not kwargs["decrypt"]
return mock_value
def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
@@ -1355,6 +1356,8 @@ def _get(self, name: str, **kwargs) -> str:
def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
assert path == mock_name
+ assert kwargs["recursive"]
+ assert not kwargs["decrypt"]
return mock_value
monkeypatch.setattr(parameters.ssm, "DEFAULT_PROVIDERS", {})
@@ -1468,3 +1471,13 @@ def test_transform_value_wrong(mock_value):
parameters.base.transform_value(mock_value, "INCORRECT")
assert "Invalid transform type" in str(excinfo)
+
+
+def test_transform_value_ignore_error(mock_value):
+ """
+ Test transform_value() does not raise errors when raise_on_transform_error is False
+ """
+
+ value = parameters.base.transform_value(mock_value, "INCORRECT", raise_on_transform_error=False)
+
+ assert value is None
diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py
index 16c476ee0fc..3e2492b9e15 100644
--- a/tests/unit/test_tracing.py
+++ b/tests/unit/test_tracing.py
@@ -179,10 +179,9 @@ def test_tracer_no_autopatch(patch_mock):
assert patch_mock.call_count == 0
-def test_tracer_lambda_handler_does_not_add_empty_response_as_metadata(mocker, provider_stub):
+def test_tracer_lambda_handler_does_not_add_empty_response_as_metadata(mocker, provider_stub, in_subsegment_mock):
# GIVEN tracer is initialized
- put_metadata_mock = mocker.MagicMock()
- provider = provider_stub(put_metadata_mock=put_metadata_mock)
+ provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment)
tracer = Tracer(provider=provider)
# WHEN capture_lambda_handler decorator is used
@@ -194,13 +193,12 @@ def handler(event, context):
handler({}, mocker.MagicMock())
# THEN we should not add empty metadata
- assert put_metadata_mock.call_count == 0
+ assert in_subsegment_mock.put_metadata.call_count == 0
-def test_tracer_method_does_not_add_empty_response_as_metadata(mocker, provider_stub):
+def test_tracer_method_does_not_add_empty_response_as_metadata(mocker, provider_stub, in_subsegment_mock):
# GIVEN tracer is initialized
- put_metadata_mock = mocker.MagicMock()
- provider = provider_stub(put_metadata_mock=put_metadata_mock)
+ provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment)
tracer = Tracer(provider=provider)
# WHEN capture_method decorator is used
@@ -212,7 +210,7 @@ def greeting(name, message):
greeting(name="Foo", message="Bar")
# THEN we should not add empty metadata
- assert put_metadata_mock.call_count == 0
+ assert in_subsegment_mock.put_metadata.call_count == 0
@mock.patch("aws_lambda_powertools.tracing.tracer.aws_xray_sdk.core.patch")
@@ -502,3 +500,37 @@ def generator_fn():
assert put_metadata_mock_args["namespace"] == "booking"
assert isinstance(put_metadata_mock_args["value"], ValueError)
assert str(put_metadata_mock_args["value"]) == "test"
+
+
+def test_tracer_lambda_handler_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock):
+ # GIVEN tracer is initialized
+ provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment)
+ tracer = Tracer(provider=provider, auto_patch=False)
+
+ # WHEN capture_lambda_handler decorator is used
+ # and the handler response is empty
+ @tracer.capture_lambda_handler(capture_response=False)
+ def handler(event, context):
+ return "response"
+
+ handler({}, mocker.MagicMock())
+
+ # THEN we should not add any metadata
+ assert in_subsegment_mock.put_metadata.call_count == 0
+
+
+def test_tracer_method_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock):
+ # GIVEN tracer is initialized
+ provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment)
+ tracer = Tracer(provider=provider, auto_patch=False)
+
+ # WHEN capture_method decorator is used
+ # and the method response is empty
+ @tracer.capture_method(capture_response=False)
+ def greeting(name, message):
+ return "response"
+
+ greeting(name="Foo", message="Bar")
+
+ # THEN we should not add any metadata
+ assert in_subsegment_mock.put_metadata.call_count == 0