diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6a77b1f6bda..106d0ada40c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,26 @@
# Unreleased
+## Documentation
+
+* **homepage:** remove v1 layer limitation on pydantic not being included
+* **tracer:** add note on why X-Ray SDK over ADOT closes [#1675](https://github.com/awslabs/aws-lambda-powertools-python/issues/1675)
+
+## Features
+
+* **metrics:** add EphemeralMetrics as a non-singleton option ([#1676](https://github.com/awslabs/aws-lambda-powertools-python/issues/1676))
+* **parameters:** add get_parameters_by_name for SSM params in distinct paths ([#1678](https://github.com/awslabs/aws-lambda-powertools-python/issues/1678))
+
+## Maintenance
+
+* **deps-dev:** bump aws-cdk-lib from 2.49.0 to 2.50.0 ([#1683](https://github.com/awslabs/aws-lambda-powertools-python/issues/1683))
+* **deps-dev:** bump mypy-boto3-dynamodb from 1.25.0 to 1.26.0.post1 ([#1682](https://github.com/awslabs/aws-lambda-powertools-python/issues/1682))
+* **deps-dev:** bump mypy-boto3-cloudformation from 1.25.0 to 1.26.0.post1 ([#1679](https://github.com/awslabs/aws-lambda-powertools-python/issues/1679))
+* **package:** correct pyproject version manually
+
+
+
+## [v2.1.0] - 2022-10-31
## Bug Fixes
* **ci:** linting issues after flake8-blackbear,mypy upgrades
@@ -30,6 +50,7 @@
## Maintenance
+* update v2 layer ARN on documentation
* **ci:** fix typo on version description
* **deps:** bump peaceiris/actions-gh-pages from 3.8.0 to 3.9.0 ([#1649](https://github.com/awslabs/aws-lambda-powertools-python/issues/1649))
* **deps:** bump docker/setup-qemu-action from 2.0.0 to 2.1.0 ([#1627](https://github.com/awslabs/aws-lambda-powertools-python/issues/1627))
@@ -2531,7 +2552,8 @@
* Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38
-[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.0.0...HEAD
+[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.1.0...HEAD
+[v2.1.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v2.0.0...v2.1.0
[v2.0.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.31.1...v2.0.0
[v1.31.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.31.0...v1.31.1
[v1.31.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.30.0...v1.31.0
diff --git a/aws_lambda_powertools/metrics/__init__.py b/aws_lambda_powertools/metrics/__init__.py
index 7379dad8b88..3315899da0b 100644
--- a/aws_lambda_powertools/metrics/__init__.py
+++ b/aws_lambda_powertools/metrics/__init__.py
@@ -3,10 +3,11 @@
from .base import MetricUnit
from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError
from .metric import single_metric
-from .metrics import Metrics
+from .metrics import EphemeralMetrics, Metrics
__all__ = [
"Metrics",
+ "EphemeralMetrics",
"single_metric",
"MetricUnit",
"MetricUnitError",
diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py
index 29a780d0af1..b032d181811 100644
--- a/aws_lambda_powertools/metrics/base.py
+++ b/aws_lambda_powertools/metrics/base.py
@@ -1,11 +1,14 @@
import datetime
+import functools
import json
import logging
import numbers
import os
+import warnings
from collections import defaultdict
+from contextlib import contextmanager
from enum import Enum
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Union
from ..shared import constants
from ..shared.functions import resolve_env_var_choice
@@ -16,6 +19,8 @@
MAX_METRICS = 100
MAX_DIMENSIONS = 29
+is_cold_start = True
+
class MetricUnit(Enum):
Seconds = "Seconds"
@@ -86,9 +91,9 @@ def __init__(
self.dimension_set = dimension_set if dimension_set is not None else {}
self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
+ self.metadata_set = metadata_set if metadata_set is not None else {}
self._metric_units = [unit.value for unit in MetricUnit]
self._metric_unit_options = list(MetricUnit.__members__)
- self.metadata_set = metadata_set if metadata_set is not None else {}
def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None:
"""Adds given metric
@@ -120,7 +125,7 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> N
if not isinstance(value, numbers.Number):
raise MetricValueError(f"{value} is not a valid number")
- unit = self.__extract_metric_unit_value(unit=unit)
+ unit = self._extract_metric_unit_value(unit=unit)
metric: Dict = self.metric_set.get(name, defaultdict(list))
metric["Unit"] = unit
metric["Value"].append(float(value))
@@ -179,7 +184,7 @@ def serialize_metric_set(
if self.service and not self.dimension_set.get("service"):
# self.service won't be a float
- self.add_dimension(name="service", value=self.service) # type: ignore[arg-type]
+ self.add_dimension(name="service", value=self.service)
if len(metrics) == 0:
raise SchemaValidationError("Must contain at least one metric.")
@@ -274,7 +279,86 @@ def add_metadata(self, key: str, value: Any) -> None:
else:
self.metadata_set[str(key)] = value
- def __extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str:
+ def clear_metrics(self) -> None:
+ logger.debug("Clearing out existing metric set from memory")
+ self.metric_set.clear()
+ self.dimension_set.clear()
+ self.metadata_set.clear()
+
+ def log_metrics(
+ self,
+ lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None,
+ capture_cold_start_metric: bool = False,
+ raise_on_empty_metrics: bool = False,
+ default_dimensions: Optional[Dict[str, str]] = None,
+ ):
+ """Decorator to serialize and publish metrics at the end of a function execution.
+
+    Be aware that the log_metrics **does call** the decorated function (e.g. lambda_handler).
+
+ Example
+ -------
+ **Lambda function using tracer and metrics decorators**
+
+ from aws_lambda_powertools import Metrics, Tracer
+
+ metrics = Metrics(service="payment")
+ tracer = Tracer(service="payment")
+
+ @tracer.capture_lambda_handler
+ @metrics.log_metrics
+ def handler(event, context):
+ ...
+
+ Parameters
+ ----------
+ lambda_handler : Callable[[Any, Any], Any], optional
+ lambda function handler, by default None
+ capture_cold_start_metric : bool, optional
+ captures cold start metric, by default False
+ raise_on_empty_metrics : bool, optional
+ raise exception if no metrics are emitted, by default False
+ default_dimensions: Dict[str, str], optional
+ metric dimensions as key=value that will always be present
+
+ Raises
+ ------
+ e
+ Propagate error received
+ """
+
+ # If handler is None we've been called with parameters
+ # Return a partial function with args filled
+ if lambda_handler is None:
+ logger.debug("Decorator called with parameters")
+ return functools.partial(
+ self.log_metrics,
+ capture_cold_start_metric=capture_cold_start_metric,
+ raise_on_empty_metrics=raise_on_empty_metrics,
+ default_dimensions=default_dimensions,
+ )
+
+ @functools.wraps(lambda_handler)
+ def decorate(event, context):
+ try:
+ if default_dimensions:
+ self.set_default_dimensions(**default_dimensions)
+ response = lambda_handler(event, context)
+ if capture_cold_start_metric:
+ self._add_cold_start_metric(context=context)
+ finally:
+ if not raise_on_empty_metrics and not self.metric_set:
+ warnings.warn("No metrics to publish, skipping")
+ else:
+ metrics = self.serialize_metric_set()
+ self.clear_metrics()
+ print(json.dumps(metrics, separators=(",", ":")))
+
+ return response
+
+ return decorate
+
+ def _extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str:
"""Return metric value from metric unit whether that's str or MetricUnit enum
Parameters
@@ -306,3 +390,139 @@ def __extract_metric_unit_value(self, unit: Union[str, MetricUnit]) -> str:
unit = unit.value
return unit
+
+ def _add_cold_start_metric(self, context: Any) -> None:
+ """Add cold start metric and function_name dimension
+
+ Parameters
+ ----------
+ context : Any
+ Lambda context
+ """
+ global is_cold_start
+ if is_cold_start:
+ logger.debug("Adding cold start metric and function_name dimension")
+ with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric:
+ metric.add_dimension(name="function_name", value=context.function_name)
+ if self.service:
+ metric.add_dimension(name="service", value=str(self.service))
+ is_cold_start = False
+
+
+class SingleMetric(MetricManager):
+ """SingleMetric creates an EMF object with a single metric.
+
+ EMF specification doesn't allow metrics with different dimensions.
+ SingleMetric overrides MetricManager's add_metric method to do just that.
+
+ Use `single_metric` when you need to create metrics with different dimensions,
+ otherwise `aws_lambda_powertools.metrics.metrics.Metrics` is
+    a more cost-effective option
+
+ Environment variables
+ ---------------------
+ POWERTOOLS_METRICS_NAMESPACE : str
+ metric namespace
+
+ Example
+ -------
+ **Creates cold start metric with function_version as dimension**
+
+ import json
+ from aws_lambda_powertools.metrics import single_metric, MetricUnit
+ metric = single_metric(namespace="ServerlessAirline")
+
+ metric.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1)
+ metric.add_dimension(name="function_version", value=47)
+
+ print(json.dumps(metric.serialize_metric_set(), indent=4))
+
+ Parameters
+ ----------
+ MetricManager : MetricManager
+ Inherits from `aws_lambda_powertools.metrics.base.MetricManager`
+ """
+
+ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None:
+ """Method to prevent more than one metric being created
+
+ Parameters
+ ----------
+ name : str
+ Metric name (e.g. BookingConfirmation)
+ unit : MetricUnit
+ Metric unit (e.g. "Seconds", MetricUnit.Seconds)
+ value : float
+ Metric value
+ """
+ if len(self.metric_set) > 0:
+ logger.debug(f"Metric {name} already set, skipping...")
+ return
+ return super().add_metric(name, unit, value)
+
+
+@contextmanager
+def single_metric(
+ name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None
+) -> Generator[SingleMetric, None, None]:
+ """Context manager to simplify creation of a single metric
+
+ Example
+ -------
+ **Creates cold start metric with function_version as dimension**
+
+ from aws_lambda_powertools import single_metric
+ from aws_lambda_powertools.metrics import MetricUnit
+
+ with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ServerlessAirline") as metric:
+ metric.add_dimension(name="function_version", value="47")
+
+ **Same as above but set namespace using environment variable**
+
+ $ export POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline"
+
+ from aws_lambda_powertools import single_metric
+ from aws_lambda_powertools.metrics import MetricUnit
+
+ with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric:
+ metric.add_dimension(name="function_version", value="47")
+
+ Parameters
+ ----------
+ name : str
+ Metric name
+ unit : MetricUnit
+ `aws_lambda_powertools.helper.models.MetricUnit`
+ value : float
+ Metric value
+ namespace: str
+ Namespace for metrics
+
+ Yields
+ -------
+ SingleMetric
+ SingleMetric class instance
+
+ Raises
+ ------
+ MetricUnitError
+        When metric unit isn't supported by CloudWatch
+ MetricValueError
+ When metric value isn't a number
+ SchemaValidationError
+ When metric object fails EMF schema validation
+ """
+ metric_set: Optional[Dict] = None
+ try:
+ metric: SingleMetric = SingleMetric(namespace=namespace)
+ metric.add_metric(name=name, unit=unit, value=value)
+ yield metric
+ metric_set = metric.serialize_metric_set()
+ finally:
+ print(json.dumps(metric_set, separators=(",", ":")))
+
+
+def reset_cold_start_flag():
+ global is_cold_start
+ if not is_cold_start:
+ is_cold_start = True
diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py
index 94b427738a1..5465889f1f0 100644
--- a/aws_lambda_powertools/metrics/metric.py
+++ b/aws_lambda_powertools/metrics/metric.py
@@ -1,121 +1,4 @@
-import json
-import logging
-from contextlib import contextmanager
-from typing import Dict, Generator, Optional, Union
+# NOTE: prevents circular inheritance import
+from .base import SingleMetric, single_metric
-from .base import MetricManager, MetricUnit
-
-logger = logging.getLogger(__name__)
-
-
-class SingleMetric(MetricManager):
- """SingleMetric creates an EMF object with a single metric.
-
- EMF specification doesn't allow metrics with different dimensions.
- SingleMetric overrides MetricManager's add_metric method to do just that.
-
- Use `single_metric` when you need to create metrics with different dimensions,
- otherwise `aws_lambda_powertools.metrics.metrics.Metrics` is
- a more cost effective option
-
- Environment variables
- ---------------------
- POWERTOOLS_METRICS_NAMESPACE : str
- metric namespace
-
- Example
- -------
- **Creates cold start metric with function_version as dimension**
-
- import json
- from aws_lambda_powertools.metrics import single_metric, MetricUnit
- metric = single_metric(namespace="ServerlessAirline")
-
- metric.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1)
- metric.add_dimension(name="function_version", value=47)
-
- print(json.dumps(metric.serialize_metric_set(), indent=4))
-
- Parameters
- ----------
- MetricManager : MetricManager
- Inherits from `aws_lambda_powertools.metrics.base.MetricManager`
- """
-
- def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float) -> None:
- """Method to prevent more than one metric being created
-
- Parameters
- ----------
- name : str
- Metric name (e.g. BookingConfirmation)
- unit : MetricUnit
- Metric unit (e.g. "Seconds", MetricUnit.Seconds)
- value : float
- Metric value
- """
- if len(self.metric_set) > 0:
- logger.debug(f"Metric {name} already set, skipping...")
- return
- return super().add_metric(name, unit, value)
-
-
-@contextmanager
-def single_metric(
- name: str, unit: MetricUnit, value: float, namespace: Optional[str] = None
-) -> Generator[SingleMetric, None, None]:
- """Context manager to simplify creation of a single metric
-
- Example
- -------
- **Creates cold start metric with function_version as dimension**
-
- from aws_lambda_powertools import single_metric
- from aws_lambda_powertools.metrics import MetricUnit
-
- with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ServerlessAirline") as metric:
- metric.add_dimension(name="function_version", value="47")
-
- **Same as above but set namespace using environment variable**
-
- $ export POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline"
-
- from aws_lambda_powertools import single_metric
- from aws_lambda_powertools.metrics import MetricUnit
-
- with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric:
- metric.add_dimension(name="function_version", value="47")
-
- Parameters
- ----------
- name : str
- Metric name
- unit : MetricUnit
- `aws_lambda_powertools.helper.models.MetricUnit`
- value : float
- Metric value
- namespace: str
- Namespace for metrics
-
- Yields
- -------
- SingleMetric
- SingleMetric class instance
-
- Raises
- ------
- MetricUnitError
- When metric metric isn't supported by CloudWatch
- MetricValueError
- When metric value isn't a number
- SchemaValidationError
- When metric object fails EMF schema validation
- """
- metric_set: Optional[Dict] = None
- try:
- metric: SingleMetric = SingleMetric(namespace=namespace)
- metric.add_metric(name=name, unit=unit, value=value)
- yield metric
- metric_set = metric.serialize_metric_set()
- finally:
- print(json.dumps(metric_set, separators=(",", ":")))
+__all__ = ["SingleMetric", "single_metric"]
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index cbf1d2eb2e2..43a45ff885d 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -1,15 +1,6 @@
-import functools
-import json
-import logging
-import warnings
-from typing import Any, Callable, Dict, Optional, Union
+from typing import Any, Dict, Optional
-from .base import MetricManager, MetricUnit
-from .metric import single_metric
-
-logger = logging.getLogger(__name__)
-
-is_cold_start = True
+from .base import MetricManager
class Metrics(MetricManager):
@@ -66,6 +57,11 @@ def lambda_handler():
When metric object fails EMF schema validation
"""
+ # NOTE: We use class attrs to share metrics data across instances
+ # this allows customers to initialize Metrics() throughout their code base (and middlewares)
+ # and not get caught by accident with metrics data loss, or data deduplication
+ # e.g., m1 and m2 add metric ProductCreated, however m1 has 'version' dimension but m2 doesn't
+ # Result: ProductCreated is created twice as we now have 2 different EMF blobs
_metrics: Dict[str, Any] = {}
_dimensions: Dict[str, str] = {}
_metadata: Dict[str, Any] = {}
@@ -73,19 +69,17 @@ def lambda_handler():
def __init__(self, service: Optional[str] = None, namespace: Optional[str] = None):
self.metric_set = self._metrics
- self.service = service
- self.namespace: Optional[str] = namespace
self.metadata_set = self._metadata
self.default_dimensions = self._default_dimensions
self.dimension_set = self._dimensions
- self.dimension_set.update(**self._default_dimensions)
- super().__init__(
+ self.dimension_set.update(**self._default_dimensions)
+ return super().__init__(
+ namespace=namespace,
+ service=service,
metric_set=self.metric_set,
dimension_set=self.dimension_set,
- namespace=self.namespace,
metadata_set=self.metadata_set,
- service=self.service,
)
def set_default_dimensions(self, **dimensions) -> None:
@@ -107,7 +101,7 @@ def set_default_dimensions(self, **dimensions) -> None:
@metrics.log_metrics()
def lambda_handler():
- return True
+ return True
"""
for name, value in dimensions.items():
self.add_dimension(name, value)
@@ -118,98 +112,19 @@ def clear_default_dimensions(self) -> None:
self.default_dimensions.clear()
def clear_metrics(self) -> None:
- logger.debug("Clearing out existing metric set from memory")
- self.metric_set.clear()
- self.dimension_set.clear()
- self.metadata_set.clear()
- self.set_default_dimensions(**self.default_dimensions) # re-add default dimensions
-
- def log_metrics(
- self,
- lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None,
- capture_cold_start_metric: bool = False,
- raise_on_empty_metrics: bool = False,
- default_dimensions: Optional[Dict[str, str]] = None,
- ):
- """Decorator to serialize and publish metrics at the end of a function execution.
-
- Be aware that the log_metrics **does call* the decorated function (e.g. lambda_handler).
-
- Example
- -------
- **Lambda function using tracer and metrics decorators**
-
- from aws_lambda_powertools import Metrics, Tracer
+ super().clear_metrics()
+ # re-add default dimensions
+ self.set_default_dimensions(**self.default_dimensions)
- metrics = Metrics(service="payment")
- tracer = Tracer(service="payment")
- @tracer.capture_lambda_handler
- @metrics.log_metrics
- def handler(event, context):
- ...
+class EphemeralMetrics(MetricManager):
+    """Non-singleton version of Metrics that does not persist metrics across instances
- Parameters
- ----------
- lambda_handler : Callable[[Any, Any], Any], optional
- lambda function handler, by default None
- capture_cold_start_metric : bool, optional
- captures cold start metric, by default False
- raise_on_empty_metrics : bool, optional
- raise exception if no metrics are emitted, by default False
- default_dimensions: Dict[str, str], optional
- metric dimensions as key=value that will always be present
-
- Raises
- ------
- e
- Propagate error received
- """
+ NOTE: This is useful when you want to:
- # If handler is None we've been called with parameters
- # Return a partial function with args filled
- if lambda_handler is None:
- logger.debug("Decorator called with parameters")
- return functools.partial(
- self.log_metrics,
- capture_cold_start_metric=capture_cold_start_metric,
- raise_on_empty_metrics=raise_on_empty_metrics,
- default_dimensions=default_dimensions,
- )
-
- @functools.wraps(lambda_handler)
- def decorate(event, context):
- try:
- if default_dimensions:
- self.set_default_dimensions(**default_dimensions)
- response = lambda_handler(event, context)
- if capture_cold_start_metric:
- self.__add_cold_start_metric(context=context)
- finally:
- if not raise_on_empty_metrics and not self.metric_set:
- warnings.warn("No metrics to publish, skipping")
- else:
- metrics = self.serialize_metric_set()
- self.clear_metrics()
- print(json.dumps(metrics, separators=(",", ":")))
-
- return response
-
- return decorate
-
- def __add_cold_start_metric(self, context: Any) -> None:
- """Add cold start metric and function_name dimension
+ - Create metrics for distinct namespaces
+ - Create the same metrics with different dimensions more than once
+ """
- Parameters
- ----------
- context : Any
- Lambda context
- """
- global is_cold_start
- if is_cold_start:
- logger.debug("Adding cold start metric and function_name dimension")
- with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric:
- metric.add_dimension(name="function_name", value=context.function_name)
- if self.service:
- metric.add_dimension(name="service", value=str(self.service))
- is_cold_start = False
+ def __init__(self, service: Optional[str] = None, namespace: Optional[str] = None):
+ super().__init__(namespace=namespace, service=service)
diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py
index 30070382d31..884edb37e35 100644
--- a/aws_lambda_powertools/shared/functions.py
+++ b/aws_lambda_powertools/shared/functions.py
@@ -1,9 +1,10 @@
import base64
+import itertools
import logging
import os
import warnings
from binascii import Error as BinAsciiError
-from typing import Optional, Union
+from typing import Dict, Generator, Optional, Union, overload
from aws_lambda_powertools.shared import constants
@@ -47,6 +48,21 @@ def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bo
return choice if choice is not None else strtobool(env)
+@overload
+def resolve_env_var_choice(env: Optional[str], choice: float) -> float:
+ ...
+
+
+@overload
+def resolve_env_var_choice(env: Optional[str], choice: str) -> str:
+ ...
+
+
+@overload
+def resolve_env_var_choice(env: Optional[str], choice: Optional[str]) -> str:
+ ...
+
+
def resolve_env_var_choice(
env: Optional[str] = None, choice: Optional[Union[str, float]] = None
) -> Optional[Union[str, float]]:
@@ -100,3 +116,8 @@ def powertools_debug_is_set() -> bool:
return True
return False
+
+
+def slice_dictionary(data: Dict, chunk_size: int) -> Generator[Dict, None, None]:
+ for _ in range(0, len(data), chunk_size):
+ yield {dict_key: data[dict_key] for dict_key in itertools.islice(data, chunk_size)}
diff --git a/aws_lambda_powertools/utilities/feature_flags/appconfig.py b/aws_lambda_powertools/utilities/feature_flags/appconfig.py
index 8c8dbacc6c5..8695c1fd8c9 100644
--- a/aws_lambda_powertools/utilities/feature_flags/appconfig.py
+++ b/aws_lambda_powertools/utilities/feature_flags/appconfig.py
@@ -15,8 +15,6 @@
from .base import StoreProvider
from .exceptions import ConfigurationStoreError, StoreClientError
-TRANSFORM_TYPE = "json"
-
class AppConfigStore(StoreProvider):
def __init__(
@@ -74,7 +72,7 @@ def get_raw_configuration(self) -> Dict[str, Any]:
dict,
self._conf_store.get(
name=self.name,
- transform=TRANSFORM_TYPE,
+ transform="json",
max_age=self.cache_seconds,
),
)
diff --git a/aws_lambda_powertools/utilities/parameters/__init__.py b/aws_lambda_powertools/utilities/parameters/__init__.py
index 7dce2ac4c9a..9fcaa4fa701 100644
--- a/aws_lambda_powertools/utilities/parameters/__init__.py
+++ b/aws_lambda_powertools/utilities/parameters/__init__.py
@@ -9,7 +9,7 @@
from .dynamodb import DynamoDBProvider
from .exceptions import GetParameterError, TransformParameterError
from .secrets import SecretsProvider, get_secret
-from .ssm import SSMProvider, get_parameter, get_parameters
+from .ssm import SSMProvider, get_parameter, get_parameters, get_parameters_by_name
__all__ = [
"AppConfigProvider",
@@ -22,6 +22,7 @@
"get_app_config",
"get_parameter",
"get_parameters",
+ "get_parameters_by_name",
"get_secret",
"clear_caches",
]
diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py
index a3a340a62be..7884728024e 100644
--- a/aws_lambda_powertools/utilities/parameters/appconfig.py
+++ b/aws_lambda_powertools/utilities/parameters/appconfig.py
@@ -9,6 +9,8 @@
import boto3
from botocore.config import Config
+from aws_lambda_powertools.utilities.parameters.types import TransformOptions
+
if TYPE_CHECKING:
from mypy_boto3_appconfigdata import AppConfigDataClient
@@ -132,7 +134,7 @@ def get_app_config(
name: str,
environment: str,
application: Optional[str] = None,
- transform: Optional[str] = None,
+ transform: TransformOptions = None,
force_fetch: bool = False,
max_age: int = DEFAULT_MAX_AGE_SECS,
**sdk_options
diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py
index b76b16e1dd8..8587d3b5f3f 100644
--- a/aws_lambda_powertools/utilities/parameters/base.py
+++ b/aws_lambda_powertools/utilities/parameters/base.py
@@ -1,17 +1,31 @@
"""
Base for Parameter providers
"""
+from __future__ import annotations
import base64
import json
from abc import ABC, abstractmethod
-from collections import namedtuple
from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Type, Union
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ NamedTuple,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+ overload,
+)
import boto3
from botocore.config import Config
+from aws_lambda_powertools.utilities.parameters.types import TransformOptions
+
from .exceptions import GetParameterError, TransformParameterError
if TYPE_CHECKING:
@@ -22,7 +36,6 @@
DEFAULT_MAX_AGE_SECS = 5
-ExpirableValue = namedtuple("ExpirableValue", ["value", "ttl"])
# These providers will be dynamically initialized on first use of the helper functions
DEFAULT_PROVIDERS: Dict[str, Any] = {}
TRANSFORM_METHOD_JSON = "json"
@@ -30,29 +43,42 @@
SUPPORTED_TRANSFORM_METHODS = [TRANSFORM_METHOD_JSON, TRANSFORM_METHOD_BINARY]
ParameterClients = Union["AppConfigDataClient", "SecretsManagerClient", "SSMClient"]
+TRANSFORM_METHOD_MAPPING = {
+ TRANSFORM_METHOD_JSON: json.loads,
+ TRANSFORM_METHOD_BINARY: base64.b64decode,
+ ".json": json.loads,
+ ".binary": base64.b64decode,
+ None: lambda x: x,
+}
+
+
+class ExpirableValue(NamedTuple):
+ value: str | bytes | Dict[str, Any]
+ ttl: datetime
+
class BaseProvider(ABC):
"""
Abstract Base Class for Parameter providers
"""
- store: Any = None
+ store: Dict[Tuple[str, TransformOptions], ExpirableValue]
def __init__(self):
"""
Initialize the base provider
"""
- self.store = {}
+ self.store: Dict[Tuple[str, TransformOptions], ExpirableValue] = {}
- def _has_not_expired(self, key: Tuple[str, Optional[str]]) -> bool:
+ def has_not_expired_in_cache(self, key: Tuple[str, TransformOptions]) -> bool:
return key in self.store and self.store[key].ttl >= datetime.now()
def get(
self,
name: str,
max_age: int = DEFAULT_MAX_AGE_SECS,
- transform: Optional[str] = None,
+ transform: TransformOptions = None,
force_fetch: bool = False,
**sdk_options,
) -> Optional[Union[str, dict, bytes]]:
@@ -95,7 +121,7 @@ def get(
value: Optional[Union[str, bytes, dict]] = None
key = (name, transform)
- if not force_fetch and self._has_not_expired(key):
+ if not force_fetch and self.has_not_expired_in_cache(key):
return self.store[key].value
try:
@@ -105,11 +131,11 @@ def get(
raise GetParameterError(str(exc))
if transform:
- if isinstance(value, bytes):
- value = value.decode("utf-8")
- value = transform_value(value, transform)
+ value = transform_value(key=name, value=value, transform=transform, raise_on_transform_error=True)
- self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age))
+ # NOTE: don't cache None, as they might've been failed transforms and may be corrected
+ if value is not None:
+ self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age))
return value
@@ -124,7 +150,7 @@ def get_multiple(
self,
path: str,
max_age: int = DEFAULT_MAX_AGE_SECS,
- transform: Optional[str] = None,
+ transform: TransformOptions = None,
raise_on_transform_error: bool = False,
force_fetch: bool = False,
**sdk_options,
@@ -160,8 +186,8 @@ def get_multiple(
"""
key = (path, transform)
- if not force_fetch and self._has_not_expired(key):
- return self.store[key].value
+ if not force_fetch and self.has_not_expired_in_cache(key):
+ return self.store[key].value # type: ignore # need to revisit entire typing here
try:
values = self._get_multiple(path, **sdk_options)
@@ -170,13 +196,8 @@ def get_multiple(
raise GetParameterError(str(exc))
if transform:
- transformed_values: dict = {}
- for (item, value) in values.items():
- _transform = get_transform_method(item, transform)
- if not _transform:
- continue
- transformed_values[item] = transform_value(value, _transform, raise_on_transform_error)
- values.update(transformed_values)
+ values.update(transform_value(values, transform, raise_on_transform_error))
+
self.store[key] = ExpirableValue(values, datetime.now() + timedelta(seconds=max_age))
return values
@@ -191,6 +212,12 @@ def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]:
def clear_cache(self):
self.store.clear()
+ def add_to_cache(self, key: Tuple[str, TransformOptions], value: Any, max_age: int):
+ if max_age <= 0:
+ return
+
+ self.store[key] = ExpirableValue(value, datetime.now() + timedelta(seconds=max_age))
+
@staticmethod
def _build_boto3_client(
service_name: str,
@@ -258,57 +285,81 @@ def _build_boto3_resource_client(
return session.resource(service_name=service_name, config=config, endpoint_url=endpoint_url)
-def get_transform_method(key: str, transform: Optional[str] = None) -> Optional[str]:
+def get_transform_method(value: str, transform: TransformOptions = None) -> Callable[..., Any]:
"""
Determine the transform method
Examples
-------
- >>> get_transform_method("key", "any_other_value")
+    >>> get_transform_method("key", "any_other_value")
'any_other_value'
- >>> get_transform_method("key.json", "auto")
+    >>> get_transform_method("key.json", "auto")
'json'
- >>> get_transform_method("key.binary", "auto")
+    >>> get_transform_method("key.binary", "auto")
'binary'
- >>> get_transform_method("key", "auto")
+    >>> get_transform_method("key", "auto")
None
- >>> get_transform_method("key", None)
+    >>> get_transform_method("key", None)
None
Parameters
---------
- key: str
- Only used when the tranform is "auto".
+ value: str
+ Only used when the transform is "auto".
transform: str, optional
Original transform method, only "auto" will try to detect the transform method by the key
Returns
------
- Optional[str]:
- The transform method either when transform is "auto" then None, "json" or "binary" is returned
- or the original transform method
+ Callable:
+        Transform function could be json.loads, base64.b64decode, or a lambda that echoes the str value
"""
- if transform != "auto":
- return transform
+ transform_method = TRANSFORM_METHOD_MAPPING.get(transform)
+
+ if transform == "auto":
+ key_suffix = value.rsplit(".")[-1]
+ transform_method = TRANSFORM_METHOD_MAPPING.get(key_suffix, TRANSFORM_METHOD_MAPPING[None])
+
+ return cast(Callable, transform_method) # https://github.com/python/mypy/issues/10740
+
+
+@overload
+def transform_value(
+ value: Dict[str, Any],
+ transform: TransformOptions,
+ raise_on_transform_error: bool = False,
+ key: str = "",
+) -> Dict[str, Any]:
+ ...
+
- for transform_method in SUPPORTED_TRANSFORM_METHODS:
- if key.endswith("." + transform_method):
- return transform_method
- return None
+@overload
+def transform_value(
+ value: Union[str, bytes, Dict[str, Any]],
+ transform: TransformOptions,
+ raise_on_transform_error: bool = False,
+ key: str = "",
+) -> Optional[Union[str, bytes, Dict[str, Any]]]:
+ ...
def transform_value(
- value: str, transform: str, raise_on_transform_error: Optional[bool] = True
-) -> Optional[Union[dict, bytes]]:
+ value: Union[str, bytes, Dict[str, Any]],
+ transform: TransformOptions,
+ raise_on_transform_error: bool = True,
+ key: str = "",
+) -> Optional[Union[str, bytes, Dict[str, Any]]]:
"""
- Apply a transform to a value
+ Transform a value using one of the available options.
Parameters
---------
value: str
Parameter value to transform
transform: str
- Type of transform, supported values are "json" and "binary"
+ Type of transform, supported values are "json", "binary", and "auto" based on suffix (.json, .binary)
+ key: str
+ Parameter key when transform is auto to infer its transform method
raise_on_transform_error: bool, optional
Raises an exception if any transform fails, otherwise this will
return a None value for each transform that failed
@@ -318,18 +369,41 @@ def transform_value(
TransformParameterError:
When the parameter value could not be transformed
"""
+ # Maintenance: For v3, we should consider returning the original value for soft transform failures.
+
+ err_msg = "Unable to transform value using '{transform}' transform: {exc}"
+
+ if isinstance(value, bytes):
+ value = value.decode("utf-8")
+
+ if isinstance(value, dict):
+ # NOTE: We must handle partial failures when receiving multiple values
+ # where one of the keys might fail during transform, e.g. `{"a": "valid", "b": "{"}`
+ # expected: `{"a": "valid", "b": None}`
+
+ transformed_values: Dict[str, Any] = {}
+ for dict_key, dict_value in value.items():
+ transform_method = get_transform_method(value=dict_key, transform=transform)
+ try:
+ transformed_values[dict_key] = transform_method(dict_value)
+ except Exception as exc:
+ if raise_on_transform_error:
+ raise TransformParameterError(err_msg.format(transform=transform, exc=exc)) from exc
+ transformed_values[dict_key] = None
+ return transformed_values
+
+ if transform == "auto":
+ # key="a.json", value='{"a": "b"}', or key="a.binary", value="b64_encoded"
+ transform_method = get_transform_method(value=key, transform=transform)
+ else:
+        # value='{"key": "value"}'
+ transform_method = get_transform_method(value=value, transform=transform)
try:
- if transform == TRANSFORM_METHOD_JSON:
- return json.loads(value)
- elif transform == TRANSFORM_METHOD_BINARY:
- return base64.b64decode(value)
- else:
- raise ValueError(f"Invalid transform type '{transform}'")
-
+ return transform_method(value)
except Exception as exc:
if raise_on_transform_error:
- raise TransformParameterError(str(exc))
+ raise TransformParameterError(err_msg.format(transform=transform, exc=exc)) from exc
return None
diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py
index 3b3e782fd45..ae4a76dac4a 100644
--- a/aws_lambda_powertools/utilities/parameters/ssm.py
+++ b/aws_lambda_powertools/utilities/parameters/ssm.py
@@ -1,17 +1,23 @@
"""
AWS SSM Parameter retrieval and caching utility
"""
+from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, overload
import boto3
from botocore.config import Config
+from typing_extensions import Literal
+
+from aws_lambda_powertools.shared.functions import slice_dictionary
-from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider
+from .base import DEFAULT_MAX_AGE_SECS, DEFAULT_PROVIDERS, BaseProvider, transform_value
+from .exceptions import GetParameterError
+from .types import TransformOptions
if TYPE_CHECKING:
from mypy_boto3_ssm import SSMClient
+ from mypy_boto3_ssm.type_defs import GetParametersResultTypeDef
class SSMProvider(BaseProvider):
@@ -80,6 +86,8 @@ class SSMProvider(BaseProvider):
"""
client: Any = None
+ _MAX_GET_PARAMETERS_ITEM = 10
+ _ERRORS_KEY = "_errors"
def __init__(
self,
@@ -103,10 +111,10 @@ def get( # type: ignore[override]
self,
name: str,
max_age: int = DEFAULT_MAX_AGE_SECS,
- transform: Optional[str] = None,
+ transform: TransformOptions = None,
decrypt: bool = False,
force_fetch: bool = False,
- **sdk_options
+ **sdk_options,
) -> Optional[Union[str, dict, bytes]]:
"""
Retrieve a parameter value or return the cached value
@@ -187,7 +195,7 @@ def _get_multiple(self, path: str, decrypt: bool = False, recursive: bool = Fals
for page in self.client.get_paginator("get_parameters_by_path").paginate(**sdk_options):
for parameter in page.get("Parameters", []):
# Standardize the parameter name
- # The parameter name returned by SSM will contained the full path.
+ # The parameter name returned by SSM will contain the full path.
# However, for readability, we should return only the part after
# the path.
name = parameter["Name"]
@@ -199,6 +207,282 @@ def _get_multiple(self, path: str, decrypt: bool = False, recursive: bool = Fals
return parameters
+ # NOTE: When bandwidth permits, allocate a week to refactor to lower cognitive load
+ def get_parameters_by_name(
+ self,
+ parameters: Dict[str, Dict],
+ transform: TransformOptions = None,
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+ ) -> Dict[str, str] | Dict[str, bytes] | Dict[str, dict]:
+ """
+ Retrieve multiple parameter values by name from SSM or cache.
+
+        The raise_on_error flag decides the error handling strategy:
+
+ - A) Default to fail-fast. Raises GetParameterError upon any error
+ - B) Gracefully aggregate all parameters that failed under "_errors" key
+
+ It transparently uses GetParameter and/or GetParameters depending on decryption requirements.
+
+ ┌────────────────────────┐
+ ┌───▶ Decrypt entire batch │─────┐
+ │ └────────────────────────┘ │ ┌────────────────────┐
+ │ ├─────▶ GetParameters API │
+ ┌──────────────────┐ │ ┌────────────────────────┐ │ └────────────────────┘
+ │ Split batch │─── ┼──▶│ No decryption required │─────┘
+ └──────────────────┘ │ └────────────────────────┘
+ │ ┌────────────────────┐
+ │ ┌────────────────────────┐ │ GetParameter API │
+ └──▶│Decrypt some but not all│───────────▶────────────────────┤
+ └────────────────────────┘ │ GetParameters API │
+ └────────────────────┘
+
+ Parameters
+ ----------
+        parameters: Dict[str, Dict]
+            Dictionary of parameter names as keys, with optional per-parameter overrides as values
+ transform: str, optional
+ Transforms the content from a JSON object ('json') or base64 binary string ('binary')
+ decrypt: bool, optional
+ If the parameter values should be decrypted
+ max_age: int
+ Maximum age of the cached value
+ raise_on_error: bool
+ Whether to fail-fast or fail gracefully by including "_errors" key in the response, by default True
+
+ Raises
+ ------
+ GetParameterError
+ When the parameter provider fails to retrieve a parameter value for a given name.
+
+        When the reserved "_errors" key is used as a parameter name while fail-fast is disabled.
+ """
+ # Init potential batch/decrypt batch responses and errors
+ batch_ret: Dict[str, Any] = {}
+ decrypt_ret: Dict[str, Any] = {}
+ batch_err: List[str] = []
+ decrypt_err: List[str] = []
+ response: Dict[str, Any] = {}
+
+ # NOTE: We fail early to avoid unintended graceful errors being replaced with their '_errors' param values
+ self._raise_if_errors_key_is_present(parameters, self._ERRORS_KEY, raise_on_error)
+
+ batch_params, decrypt_params = self._split_batch_and_decrypt_parameters(parameters, transform, max_age, decrypt)
+
+ # NOTE: We need to find out whether all parameters must be decrypted or not to know which API to use
+ ## Logic:
+ ##
+        ## GetParameters API -> When decrypt is used for all parameters in the batch
+ ## GetParameter API -> When decrypt is used for one or more in the batch
+
+ if len(decrypt_params) != len(parameters):
+ decrypt_ret, decrypt_err = self._get_parameters_by_name_with_decrypt_option(decrypt_params, raise_on_error)
+ batch_ret, batch_err = self._get_parameters_batch_by_name(batch_params, raise_on_error, decrypt=False)
+ else:
+ batch_ret, batch_err = self._get_parameters_batch_by_name(decrypt_params, raise_on_error, decrypt=True)
+
+ # Fail-fast disabled, let's aggregate errors under "_errors" key so they can handle gracefully
+ if not raise_on_error:
+ response[self._ERRORS_KEY] = [*decrypt_err, *batch_err]
+
+ return {**response, **batch_ret, **decrypt_ret}
+
+ def _get_parameters_by_name_with_decrypt_option(
+ self, batch: Dict[str, Dict], raise_on_error: bool
+ ) -> Tuple[Dict, List]:
+ response: Dict[str, Any] = {}
+ errors: List[str] = []
+
+ # Decided for single-thread as it outperforms in 128M and 1G + reduce timeout risk
+ # see: https://github.com/awslabs/aws-lambda-powertools-python/issues/1040#issuecomment-1299954613
+ for parameter, options in batch.items():
+ try:
+ response[parameter] = self.get(parameter, options["max_age"], options["transform"], options["decrypt"])
+ except GetParameterError:
+ if raise_on_error:
+ raise
+ errors.append(parameter)
+ continue
+
+ return response, errors
+
+ def _get_parameters_batch_by_name(
+ self, batch: Dict[str, Dict], raise_on_error: bool = True, decrypt: bool = False
+ ) -> Tuple[Dict, List]:
+ """Slice batch and fetch parameters using GetParameters by max permitted"""
+ errors: List[str] = []
+
+ # Fetch each possible batch param from cache and return if entire batch is cached
+ cached_params = self._get_parameters_by_name_from_cache(batch)
+ if len(cached_params) == len(batch):
+ return cached_params, errors
+
+ # Slice batch by max permitted GetParameters call
+ batch_ret, errors = self._get_parameters_by_name_in_chunks(batch, cached_params, raise_on_error, decrypt)
+
+ return {**cached_params, **batch_ret}, errors
+
+ def _get_parameters_by_name_from_cache(self, batch: Dict[str, Dict]) -> Dict[str, Any]:
+ """Fetch each parameter from batch that hasn't been expired"""
+ cache = {}
+ for name, options in batch.items():
+ cache_key = (name, options["transform"])
+ if self.has_not_expired_in_cache(cache_key):
+ cache[name] = self.store[cache_key].value
+
+ return cache
+
+ def _get_parameters_by_name_in_chunks(
+ self, batch: Dict[str, Dict], cache: Dict[str, Any], raise_on_error: bool, decrypt: bool = False
+ ) -> Tuple[Dict, List]:
+ """Take out differences from cache and batch, slice it and fetch from SSM"""
+ response: Dict[str, Any] = {}
+ errors: List[str] = []
+
+ diff = {key: value for key, value in batch.items() if key not in cache}
+
+ for chunk in slice_dictionary(data=diff, chunk_size=self._MAX_GET_PARAMETERS_ITEM):
+            _chunk_response, _chunk_errors = self._get_parameters_by_name(
+                parameters=chunk, raise_on_error=raise_on_error, decrypt=decrypt
+            )
+            response.update(_chunk_response)  # accumulate each chunk; previously `response.update(response)` was a no-op that dropped earlier chunks
+            errors.extend(_chunk_errors)
+
+ return response, errors
+
+ def _get_parameters_by_name(
+ self, parameters: Dict[str, Dict], raise_on_error: bool = True, decrypt: bool = False
+ ) -> Tuple[Dict[str, Any], List[str]]:
+ """Use SSM GetParameters to fetch parameters, hydrate cache, and handle partial failure
+
+ Parameters
+ ----------
+ parameters : Dict[str, Dict]
+ Parameters to fetch
+ raise_on_error : bool, optional
+ Whether to fail-fast or fail gracefully by including "_errors" key in the response, by default True
+
+ Returns
+ -------
+ Dict[str, Any]
+ Retrieved parameters as key names and their values
+
+ Raises
+ ------
+ GetParameterError
+ When one or more parameters failed on fetching, and raise_on_error is enabled
+ """
+ ret: Dict[str, Any] = {}
+ batch_errors: List[str] = []
+ parameter_names = list(parameters.keys())
+
+ # All params in the batch must be decrypted
+ # we return early if we hit an unrecoverable exception like InvalidKeyId/InternalServerError
+ # everything else should technically be recoverable as GetParameters is non-atomic
+ try:
+ if decrypt:
+ response = self.client.get_parameters(Names=parameter_names, WithDecryption=True)
+ else:
+ response = self.client.get_parameters(Names=parameter_names)
+ except (self.client.exceptions.InvalidKeyId, self.client.exceptions.InternalServerError):
+ return ret, parameter_names
+
+ batch_errors = self._handle_any_invalid_get_parameter_errors(response, raise_on_error)
+ transformed_params = self._transform_and_cache_get_parameters_response(response, parameters, raise_on_error)
+
+ return transformed_params, batch_errors
+
+ def _transform_and_cache_get_parameters_response(
+ self, api_response: GetParametersResultTypeDef, parameters: Dict[str, Any], raise_on_error: bool = True
+ ) -> Dict[str, Any]:
+ response: Dict[str, Any] = {}
+
+ for parameter in api_response["Parameters"]:
+ name = parameter["Name"]
+ value = parameter["Value"]
+ options = parameters[name]
+ transform = options.get("transform")
+
+ # NOTE: If transform is set, we do it before caching to reduce number of operations
+ if transform:
+                value = transform_value(key=name, value=value, transform=transform, raise_on_transform_error=raise_on_error)  # type: ignore
+
+ _cache_key = (name, options["transform"])
+ self.add_to_cache(key=_cache_key, value=value, max_age=options["max_age"])
+
+ response[name] = value
+
+ return response
+
+ @staticmethod
+ def _handle_any_invalid_get_parameter_errors(
+ api_response: GetParametersResultTypeDef, raise_on_error: bool = True
+ ) -> List[str]:
+ """GetParameters is non-atomic. Failures don't always reflect in exceptions so we need to collect."""
+ failed_parameters = api_response["InvalidParameters"]
+ if failed_parameters:
+ if raise_on_error:
+ raise GetParameterError(f"Failed to fetch parameters: {failed_parameters}")
+
+ return failed_parameters
+
+ return []
+
+ @staticmethod
+ def _split_batch_and_decrypt_parameters(
+ parameters: Dict[str, Dict], transform: TransformOptions, max_age: int, decrypt: bool
+ ) -> Tuple[Dict[str, Dict], Dict[str, Dict]]:
+ """Split parameters that can be fetched by GetParameters vs GetParameter
+
+ Parameters
+ ----------
+ parameters : Dict[str, Dict]
+ Parameters containing names as key and optional config override as value
+ transform : TransformOptions
+ Transform configuration
+ max_age : int
+ How long to cache a parameter for
+ decrypt : bool
+ Whether to use KMS to decrypt a parameter
+
+ Returns
+ -------
+ Tuple[Dict[str, Dict], Dict[str, Dict]]
+ GetParameters and GetParameter parameters dict along with their overrides/globals merged
+ """
+ batch_parameters: Dict[str, Dict] = {}
+ decrypt_parameters: Dict[str, Any] = {}
+
+ for parameter, options in parameters.items():
+            # NOTE: use a TypedDict here later
+ _overrides = options or {}
+ _overrides["transform"] = _overrides.get("transform") or transform
+
+ # These values can be falsy (False, 0)
+ if "decrypt" not in _overrides:
+ _overrides["decrypt"] = decrypt
+
+ if "max_age" not in _overrides:
+ _overrides["max_age"] = max_age
+
+ # NOTE: Split parameters who have decrypt OR have it global
+ if _overrides["decrypt"]:
+ decrypt_parameters[parameter] = _overrides
+ else:
+ batch_parameters[parameter] = _overrides
+
+ return batch_parameters, decrypt_parameters
+
+ @staticmethod
+ def _raise_if_errors_key_is_present(parameters: Dict, reserved_parameter: str, raise_on_error: bool):
+ """Raise GetParameterError if fail-fast is disabled and '_errors' key is in parameters batch"""
+ if not raise_on_error and reserved_parameter in parameters:
+ raise GetParameterError(
+ f"You cannot fetch a parameter named '{reserved_parameter}' in graceful error mode."
+ )
+
def get_parameter(
name: str,
@@ -206,8 +490,8 @@ def get_parameter(
decrypt: bool = False,
force_fetch: bool = False,
max_age: int = DEFAULT_MAX_AGE_SECS,
- **sdk_options
-) -> Union[str, list, dict, bytes]:
+ **sdk_options,
+) -> Union[str, dict, bytes]:
"""
Retrieve a parameter value from AWS Systems Manager (SSM) Parameter Store
@@ -275,7 +559,7 @@ def get_parameters(
force_fetch: bool = False,
max_age: int = DEFAULT_MAX_AGE_SECS,
raise_on_transform_error: bool = False,
- **sdk_options
+ **sdk_options,
) -> Union[Dict[str, str], Dict[str, dict], Dict[str, bytes]]:
"""
Retrieve multiple parameter values from AWS Systems Manager (SSM) Parameter Store
@@ -342,5 +626,116 @@ def get_parameters(
transform=transform,
raise_on_transform_error=raise_on_transform_error,
force_fetch=force_fetch,
- **sdk_options
+ **sdk_options,
+ )
+
+
+@overload
+def get_parameters_by_name(
+ parameters: Dict[str, Dict],
+ transform: None = None,
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+) -> Dict[str, str]:
+ ...
+
+
+@overload
+def get_parameters_by_name(
+ parameters: Dict[str, Dict],
+ transform: Literal["binary"],
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+) -> Dict[str, bytes]:
+ ...
+
+
+@overload
+def get_parameters_by_name(
+ parameters: Dict[str, Dict],
+ transform: Literal["json"],
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+) -> Dict[str, Dict[str, Any]]:
+ ...
+
+
+@overload
+def get_parameters_by_name(
+ parameters: Dict[str, Dict],
+ transform: Literal["auto"],
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+) -> Union[Dict[str, str], Dict[str, dict]]:
+ ...
+
+
+def get_parameters_by_name(
+ parameters: Dict[str, Any],
+ transform: TransformOptions = None,
+ decrypt: bool = False,
+ max_age: int = DEFAULT_MAX_AGE_SECS,
+ raise_on_error: bool = True,
+) -> Union[Dict[str, str], Dict[str, bytes], Dict[str, dict]]:
+ """
+ Retrieve multiple parameter values by name from AWS Systems Manager (SSM) Parameter Store
+
+ Parameters
+ ----------
+    parameters: Dict[str, Dict]
+        Dictionary of parameter names as keys, with optional per-parameter overrides as values
+ transform: str, optional
+ Transforms the content from a JSON object ('json') or base64 binary string ('binary')
+ decrypt: bool, optional
+ If the parameter values should be decrypted
+ max_age: int
+ Maximum age of the cached value
+ raise_on_error: bool, optional
+ Whether to fail-fast or fail gracefully by including "_errors" key in the response, by default True
+
+ Example
+ -------
+
+ **Retrieves multiple parameters from distinct paths from Systems Manager Parameter Store**
+
+ from aws_lambda_powertools.utilities.parameters import get_parameters_by_name
+
+ params = {
+ "/param": {},
+ "/json": {"transform": "json"},
+ "/binary": {"transform": "binary"},
+ "/no_cache": {"max_age": 0},
+ "/api_key": {"decrypt": True},
+ }
+
+ values = get_parameters_by_name(parameters=params)
+ for param_name, value in values.items():
+ print(f"{param_name}: {value}")
+
+ # "/param": value
+ # "/json": value
+ # "/binary": value
+ # "/no_cache": value
+ # "/api_key": value
+
+ Raises
+ ------
+ GetParameterError
+ When the parameter provider fails to retrieve a parameter value for
+ a given name.
+ """
+
+ # NOTE: Decided against using multi-thread due to single-thread outperforming in 128M and 1G + timeout risk
+ # see: https://github.com/awslabs/aws-lambda-powertools-python/issues/1040#issuecomment-1299954613
+
+ # Only create the provider if this function is called at least once
+ if "ssm" not in DEFAULT_PROVIDERS:
+ DEFAULT_PROVIDERS["ssm"] = SSMProvider()
+
+ return DEFAULT_PROVIDERS["ssm"].get_parameters_by_name(
+ parameters=parameters, max_age=max_age, transform=transform, decrypt=decrypt, raise_on_error=raise_on_error
)
diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py
new file mode 100644
index 00000000000..6a15873c496
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parameters/types.py
@@ -0,0 +1,3 @@
+from typing_extensions import Literal
+
+TransformOptions = Literal["json", "binary", "auto", None]
diff --git a/docs/core/metrics.md b/docs/core/metrics.md
index 45e3ce1a4c0..e02b247f117 100644
--- a/docs/core/metrics.md
+++ b/docs/core/metrics.md
@@ -227,6 +227,61 @@ If you prefer not to use `log_metrics` because you might want to encapsulate add
--8<-- "examples/metrics/src/single_metric.py"
```
+### Metrics isolation
+
+You can use `EphemeralMetrics` class when looking to isolate multiple instances of metrics with distinct namespaces and/or dimensions.
+
+!!! note "A typical use case is multi-tenant applications, or emitting the same metrics for distinct applications."
+
+```python hl_lines="1 4" title="EphemeralMetrics usage"
+--8<-- "examples/metrics/src/ephemeral_metrics.py"
+```
+
+**Differences between `EphemeralMetrics` and `Metrics`**
+
+`EphemeralMetrics` has only two differences while keeping nearly the exact same set of features:
+
+| Feature | Metrics | EphemeralMetrics |
+| ----------------------------------------------------------------------------------------------------------- | ------- | ---------------- |
+| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - |
+| **[Default dimensions](#adding-default-dimensions) that persist across Lambda invocations** (metric flush) | Yes | - |
+
+!!! question "Why not change the default `Metrics` behaviour to not share data across instances?"
+
+This is an intentional design to prevent accidental data deduplication or data loss issues due to [CloudWatch EMF](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} metric dimension constraint.
+
+In CloudWatch, there are two metric ingestion mechanisms: [EMF (async)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} and [`PutMetricData` API (sync)](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudwatch.html#CloudWatch.Client.put_metric_data){target="_blank"}.
+
+The former creates metrics asynchronously via CloudWatch Logs, and the latter uses a synchronous and more flexible ingestion API.
+
+!!! important "Key concept"
+ CloudWatch [considers a metric unique](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#Metric){target="_blank"} by a combination of metric **name**, metric **namespace**, and zero or more metric **dimensions**.
+
+With EMF, metric dimensions are shared with any metrics you define. With `PutMetricData` API, you can set a [list](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html) defining one or more metrics with distinct dimensions.
+
+This is a subtle yet important distinction. Imagine you had the following metrics to emit:
+
+| Metric Name | Dimension | Intent |
+| ---------------------- | ----------------------------------------- | ------------------ |
+| **SuccessfulBooking** | service="booking", **tenant_id**="sample" | Application metric |
+| **IntegrationLatency** | service="booking", function_name="sample" | Operational metric |
+| **ColdStart** | service="booking", function_name="sample" | Operational metric |
+
+The `tenant_id` dimension could vary leading to two common issues:
+
+1. `ColdStart` metric will be created multiple times (N * number of unique tenant_id dimension value), despite the `function_name` being the same
+2. `IntegrationLatency` metric will also be created multiple times due to `tenant_id` as well as `function_name` (may or may not be intentional)
+
+These issues are exacerbated when you create **(A)** metric dimensions conditionally, **(B)** multiple metrics' instances throughout your code instead of reusing them (globals). Subsequent metrics' instances will have (or lack) different metric dimensions resulting in different metrics and data points with the same name.
+
+!!! note "Intentional design to address these scenarios"
+
+**On 1**, when you enable [capture_start_metric feature](#capturing-cold-start-metric), we transparently create and flush an additional EMF JSON Blob that is independent from your application metrics. This prevents data pollution.
+
+**On 2**, you can use `EphemeralMetrics` to create an additional EMF JSON Blob from your application metric (`SuccessfulBooking`). This ensures that `IntegrationLatency` operational metric data points aren't tied to any dynamic dimension values like `tenant_id`.
+
+That is why `Metrics` shares data across instances by default, as that covers 80% of use cases and different personas using Powertools. This allows them to instantiate `Metrics` in multiple places throughout their code - be a separate file, a middleware, or an abstraction that sets default dimensions.
+
## Testing your code
### Environment variables
diff --git a/docs/core/tracer.md b/docs/core/tracer.md
index ac0b73c9e27..ae4af4e9cd5 100644
--- a/docs/core/tracer.md
+++ b/docs/core/tracer.md
@@ -19,6 +19,8 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github.
???+ tip
All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}.
+!!! note "Tracer relies on AWS X-Ray SDK over [OpenTelemetry Distro (ADOT)](https://aws-otel.github.io/docs/getting-started/lambda){target="_blank"} for optimal cold start (lower latency)."
+
### Install
!!! info "This is not necessary if you're installing Powertools via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}"
diff --git a/docs/index.md b/docs/index.md
index 2eb543f998c..fb52c187e91 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -26,8 +26,8 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su
Powertools is available in the following formats:
-* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12**](#){: .copyMe}:clipboard:
-* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12**](#){: .copyMe}:clipboard:
+* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13**](#){: .copyMe}:clipboard:
+* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13**](#){: .copyMe}:clipboard:
* **PyPi**: **`pip install "aws-lambda-powertools"`**
???+ info "Some utilities require additional dependencies"
@@ -67,55 +67,55 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:12](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:13](#){: .copyMe}:clipboard: |
=== "arm64"
| Region | Layer ARN |
| ---------------- | ---------------------------------------------------------------------------------------------------------------- |
- | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
- | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12](#){: .copyMe}:clipboard: |
+ | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
+ | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13](#){: .copyMe}:clipboard: |
??? note "Note: Click to expand and copy code snippets for popular frameworks"
@@ -128,7 +128,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Type: AWS::Serverless::Function
Properties:
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13
```
=== "Serverless framework"
@@ -138,7 +138,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
hello:
handler: lambda_function.lambda_handler
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13
```
=== "CDK"
@@ -154,7 +154,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -203,7 +203,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
}
@@ -256,7 +256,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13
❯ amplify push -y
@@ -267,7 +267,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:12
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:13
? Do you want to edit the local lambda function now? No
```
@@ -276,7 +276,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Change {region} to your AWS region, e.g. `eu-west-1`
```bash title="AWS CLI"
- aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:12 --region {region}
+ aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:13 --region {region}
```
The pre-signed URL to download this Lambda Layer will be within `Location` key.
@@ -291,7 +291,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Properties:
Architectures: [arm64]
Layers:
- - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12
+ - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13
```
=== "Serverless framework"
@@ -302,7 +302,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
handler: lambda_function.lambda_handler
architecture: arm64
layers:
- - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12
+ - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13
```
=== "CDK"
@@ -318,7 +318,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn(
self,
id="lambda-powertools",
- layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12"
+ layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13"
)
aws_lambda.Function(self,
'sample-app-lambda',
@@ -368,7 +368,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
role = aws_iam_role.iam_for_lambda.arn
handler = "index.test"
runtime = "python3.9"
- layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12"]
+ layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13"]
architectures = ["arm64"]
source_code_hash = filebase64sha256("lambda_function_payload.zip")
@@ -424,7 +424,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
? Do you want to configure advanced settings? Yes
...
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13
❯ amplify push -y
@@ -435,7 +435,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
- Name:
? Which setting do you want to update? Lambda layers configuration
? Do you want to enable Lambda layers for this function? Yes
- ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12
+ ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13
? Do you want to edit the local lambda function now? No
```
@@ -443,7 +443,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Change {region} to your AWS region, e.g. `eu-west-1`
```bash title="AWS CLI"
- aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:12 --region {region}
+ aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:13 --region {region}
```
The pre-signed URL to download this Lambda Layer will be within `Location` key.
@@ -452,8 +452,6 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https:
Container Image deployment (OCI) or inline Lambda functions do not support Lambda Layers.
- Lambda Powertools Lambda Layer do not include `pydantic` library - required dependency for the `parser` utility. See [SAR](#sar) option instead.
-
#### SAR
Serverless Application Repository (SAR) App deploys a CloudFormation stack with a copy of our Lambda Layer in your AWS account and region.
diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md
index 6b7d64b66b9..9441d94fe12 100644
--- a/docs/utilities/parameters.md
+++ b/docs/utilities/parameters.md
@@ -24,34 +24,99 @@ This utility requires additional permissions to work as expected.
???+ note
Different parameter providers require different permissions.
-| Provider | Function/Method | IAM Permission |
-| ------------------- | -----------------------------------------------------------------| -----------------------------------------------------------------------------|
-| SSM Parameter Store | `get_parameter`, `SSMProvider.get` | `ssm:GetParameter` |
-| SSM Parameter Store | `get_parameters`, `SSMProvider.get_multiple` | `ssm:GetParametersByPath` |
-| SSM Parameter Store | If using `decrypt=True` | You must add an additional permission `kms:Decrypt` |
-| Secrets Manager | `get_secret`, `SecretsManager.get` | `secretsmanager:GetSecretValue` |
-| DynamoDB | `DynamoDBProvider.get` | `dynamodb:GetItem` |
-| DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb:Query` |
-| App Config | `get_app_config`, `AppConfigProvider.get_app_config` | `appconfig:GetLatestConfiguration` and `appconfig:StartConfigurationSession` |
+| Provider | Function/Method | IAM Permission |
+| --------- | ---------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
+| SSM | **`get_parameter`**, **`SSMProvider.get`** | **`ssm:GetParameter`** |
+| SSM | **`get_parameters`**, **`SSMProvider.get_multiple`** | **`ssm:GetParametersByPath`** |
+| SSM | **`get_parameters_by_name`**, **`SSMProvider.get_parameters_by_name`** | **`ssm:GetParameter`** and **`ssm:GetParameters`** |
+| SSM | If using **`decrypt=True`** | You must add an additional permission **`kms:Decrypt`** |
+| Secrets | **`get_secret`**, **`SecretsManager.get`** | **`secretsmanager:GetSecretValue`** |
+| DynamoDB | **`DynamoDBProvider.get`** | **`dynamodb:GetItem`** |
+| DynamoDB | **`DynamoDBProvider.get_multiple`** | **`dynamodb:Query`** |
+| AppConfig | **`get_app_config`**, **`AppConfigProvider.get_app_config`** | **`appconfig:GetLatestConfiguration`** and **`appconfig:StartConfigurationSession`** |
### Fetching parameters
You can retrieve a single parameter using `get_parameter` high-level function.
-For multiple parameters, you can use `get_parameters` and pass a path to retrieve them recursively.
-
-```python hl_lines="1 5 9" title="Fetching multiple parameters recursively"
+```python hl_lines="5" title="Fetching a single parameter"
from aws_lambda_powertools.utilities import parameters
def handler(event, context):
# Retrieve a single parameter
value = parameters.get_parameter("/my/parameter")
- # Retrieve multiple parameters from a path prefix recursively
- # This returns a dict with the parameter name as key
- values = parameters.get_parameters("/my/path/prefix")
- for k, v in values.items():
- print(f"{k}: {v}")
+```
+
+For multiple parameters, you can use either:
+
+* `get_parameters` to recursively fetch all parameters by path.
+* `get_parameters_by_name` to fetch distinct parameters by their full name. It also accepts custom caching, transform, decrypt per parameter.
+
+=== "get_parameters"
+
+ ```python hl_lines="1 6"
+ from aws_lambda_powertools.utilities import parameters
+
+ def handler(event, context):
+ # Retrieve multiple parameters from a path prefix recursively
+ # This returns a dict with the parameter name as key
+ values = parameters.get_parameters("/my/path/prefix")
+ for parameter, value in values.items():
+ print(f"{parameter}: {value}")
+ ```
+
+=== "get_parameters_by_name"
+
+    ```python hl_lines="3 5 14"
+    from typing import Any
+
+    from aws_lambda_powertools.utilities.parameters import get_parameters_by_name
+
+    parameters = {
+        "/develop/service/commons/telemetry/config": {"max_age": 300, "transform": "json"},
+        "/no_cache_param": {"max_age": 0},
+        # inherit default values
+        "/develop/service/payment/api/capture/url": {},
+    }
+
+    def handler(event, context):
+        # This returns a dict with the parameter name as key
+        response: dict[str, Any] = get_parameters_by_name(parameters=parameters, max_age=60)
+        for parameter, value in response.items():
+            print(f"{parameter}: {value}")
+    ```
+
+???+ tip "`get_parameters_by_name` supports graceful error handling"
+ By default, we will raise `GetParameterError` when any parameter fails to be fetched. You can override it by setting `raise_on_error=False`.
+
+ When disabled, we take the following actions:
+
+ * Add failed parameter name in the `_errors` key, _e.g._, `{_errors: ["/param1", "/param2"]}`
+ * Keep only successful parameter names and their values in the response
+ * Raise `GetParameterError` if any of your parameters is named `_errors`
+
+```python hl_lines="3 5 12-13 15" title="Graceful error handling"
+from typing import Any
+
+from aws_lambda_powertools.utilities.parameters import get_parameters_by_name
+
+parameters = {
+    "/develop/service/commons/telemetry/config": {"max_age": 300, "transform": "json"},
+    # it would fail by default
+    "/this/param/does/not/exist": {},
+}
+
+def handler(event, context):
+    values: dict[str, Any] = get_parameters_by_name(parameters=parameters, raise_on_error=False)
+    errors: list[str] = values.get("_errors", [])
+
+    # Handle gracefully, since '/this/param/does/not/exist' will only be available in `_errors`
+    if errors:
+        ...
+
+    for parameter, value in values.items():
+        print(f"{parameter}: {value}")
+```
### Fetching secrets
diff --git a/examples/metrics/src/ephemeral_metrics.py b/examples/metrics/src/ephemeral_metrics.py
new file mode 100644
index 00000000000..930404a563f
--- /dev/null
+++ b/examples/metrics/src/ephemeral_metrics.py
@@ -0,0 +1,9 @@
+from aws_lambda_powertools.metrics import EphemeralMetrics, MetricUnit
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = EphemeralMetrics()
+
+
+@metrics.log_metrics
+def lambda_handler(event: dict, context: LambdaContext):
+ metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
diff --git a/poetry.lock b/poetry.lock
index 1ef3728253a..8ca59c6145a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -45,7 +45,7 @@ typeguard = ">=2.13.3,<2.14.0"
[[package]]
name = "aws-cdk-lib"
-version = "2.49.0"
+version = "2.50.0"
description = "Version 2 of the AWS Cloud Development Kit library"
category = "dev"
optional = false
@@ -53,7 +53,7 @@ python-versions = "~=3.7"
[package.dependencies]
constructs = ">=10.0.0,<11.0.0"
-jsii = ">=1.70.0,<2.0.0"
+jsii = ">=1.69.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<2.14.0"
@@ -836,8 +836,8 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "mypy-boto3-cloudformation"
-version = "1.25.0"
-description = "Type annotations for boto3.CloudFormation 1.25.0 service generated with mypy-boto3-builder 7.11.10"
+version = "1.26.0.post1"
+description = "Type annotations for boto3.CloudFormation 1.26.0 service generated with mypy-boto3-builder 7.11.10"
category = "dev"
optional = false
python-versions = ">=3.7"
@@ -858,8 +858,8 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "mypy-boto3-dynamodb"
-version = "1.25.0"
-description = "Type annotations for boto3.DynamoDB 1.25.0 service generated with mypy-boto3-builder 7.11.10"
+version = "1.26.0.post1"
+description = "Type annotations for boto3.DynamoDB 1.26.0 service generated with mypy-boto3-builder 7.11.10"
category = "dev"
optional = false
python-versions = ">=3.7"
@@ -1511,7 +1511,7 @@ validation = ["fastjsonschema"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7.4"
-content-hash = "48a6c11b4ef71716e88efa7ffa474aa73fd7fcb02553ffd49c0d03fe72c1f838"
+content-hash = "071841d4883c874f0f7ea8a8bc9b4b9cce97eaf8fce1122e2304d74879305b3d"
[metadata.files]
attrs = [
@@ -1527,8 +1527,8 @@ aws-cdk-aws-apigatewayv2-integrations-alpha = [
{file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.47.0a0-py3-none-any.whl", hash = "sha256:4633c4e020400c7ee5790652f099f02d4f84bab24fff013250e2b41a4ffca1b0"},
]
aws-cdk-lib = [
- {file = "aws-cdk-lib-2.49.0.tar.gz", hash = "sha256:9146f03988aff823cb5b561090875c311d733bf1786859c706381e9b49f6adb7"},
- {file = "aws_cdk_lib-2.49.0-py3-none-any.whl", hash = "sha256:c9c197463e2685aa67248c2cea2a4d3f15e33a3c19250aa34e754e455295417b"},
+ {file = "aws-cdk-lib-2.50.0.tar.gz", hash = "sha256:60a44b4045cbe1ab33e001d62ded579a45b5f6b011c612f6804c109666807918"},
+ {file = "aws_cdk_lib-2.50.0-py3-none-any.whl", hash = "sha256:8f9157c906485308631edbf398a77641e5986aaccba73c3254502138a340a79a"},
]
aws-sam-translator = [
{file = "aws-sam-translator-1.53.0.tar.gz", hash = "sha256:392ed4f5fb08f72cb68a8800f0bc278d2a3b6609bd1ac66bfcdeaaa94cdc18e5"},
@@ -1900,16 +1900,16 @@ mypy-boto3-appconfigdata = [
{file = "mypy_boto3_appconfigdata-1.25.0-py3-none-any.whl", hash = "sha256:21a332c85080ce2c5416b751f4fc4870e057af85d1aedc33516bde2a86330caa"},
]
mypy-boto3-cloudformation = [
- {file = "mypy-boto3-cloudformation-1.25.0.tar.gz", hash = "sha256:1251b0f24b0a7c21a6e1f7f88fbf6aaa7fcc490b82d6ada46b5778cfd12d112f"},
- {file = "mypy_boto3_cloudformation-1.25.0-py3-none-any.whl", hash = "sha256:469e3cbc0a78003cc3134dece0203d0a0bfa908af34187aa53cd610b5f9bb19f"},
+ {file = "mypy-boto3-cloudformation-1.26.0.post1.tar.gz", hash = "sha256:9e8dce3149c5f5dee5ab05850ec9cae0925abb9da3aa63397b098219709db077"},
+ {file = "mypy_boto3_cloudformation-1.26.0.post1-py3-none-any.whl", hash = "sha256:e0dd01030209b77c3159a299a04a5c6353a6feb0dd49bff9f5acec9e0274264c"},
]
mypy-boto3-cloudwatch = [
{file = "mypy-boto3-cloudwatch-1.25.0.tar.gz", hash = "sha256:d5323ffeafe5144a232e27242c5d2f334f5e7ff10d0733145328888783ffcf12"},
{file = "mypy_boto3_cloudwatch-1.25.0-py3-none-any.whl", hash = "sha256:e4934d92972f8ea531959593e476a5967b16aed223dc3c076e7e123acc8a2e77"},
]
mypy-boto3-dynamodb = [
- {file = "mypy-boto3-dynamodb-1.25.0.tar.gz", hash = "sha256:a858453090955e29c0ca479ea19f627a2d0dcf916469b104a5c6ad648f1299ba"},
- {file = "mypy_boto3_dynamodb-1.25.0-py3-none-any.whl", hash = "sha256:03437167a084ac0eb718fdaf5931949247b344acc4f75b8b5528f078c8f6f073"},
+ {file = "mypy-boto3-dynamodb-1.26.0.post1.tar.gz", hash = "sha256:731141ff962033b77603a8a02626d64eb8575a0070e865aff31fe7443e4be6e3"},
+ {file = "mypy_boto3_dynamodb-1.26.0.post1-py3-none-any.whl", hash = "sha256:abe06c4c819ef2faa4b2f5cea127549f4c50e83a9869be80c9e77893f682a11b"},
]
mypy-boto3-lambda = [
{file = "mypy-boto3-lambda-1.25.0.tar.gz", hash = "sha256:441ea9b9a6aa94a70e4e69dd9c7148434e7e501decb5cd8e278f8ca878ef77d3"},
diff --git a/pyproject.toml b/pyproject.toml
index cddceb2388d..57a330be504 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aws_lambda_powertools"
-version = "2.0.0"
+version = "2.2.0"
description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more."
authors = ["Amazon Web Services"]
include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"]
@@ -24,6 +24,7 @@ aws-xray-sdk = { version = "^2.8.0", optional = true }
fastjsonschema = { version = "^2.14.5", optional = true }
pydantic = { version = "^1.8.2", optional = true }
boto3 = { version = "^1.20.32", optional = true }
+typing-extensions = "^4.4.0"
[tool.poetry.dev-dependencies]
coverage = {extras = ["toml"], version = "^6.2"}
@@ -50,15 +51,15 @@ mkdocs-git-revision-date-plugin = "^0.3.2"
mike = "^1.1.2"
retry = "^0.9.2"
pytest-xdist = "^2.5.0"
-aws-cdk-lib = "^2.49.0"
+aws-cdk-lib = "^2.50.0"
"aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0"
"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0"
pytest-benchmark = "^3.4.1"
python-snappy = "^0.6.1"
mypy-boto3-appconfig = "^1.24.29"
-mypy-boto3-cloudformation = "^1.24.0"
+mypy-boto3-cloudformation = "^1.26.0"
mypy-boto3-cloudwatch = "^1.24.35"
-mypy-boto3-dynamodb = "^1.24.60"
+mypy-boto3-dynamodb = "^1.26.0"
mypy-boto3-lambda = "^1.24.0"
mypy-boto3-logs = "^1.24.0"
mypy-boto3-secretsmanager = "^1.24.11"
diff --git a/tests/e2e/parameters/handlers/parameter_ssm_get_parameters_by_name.py b/tests/e2e/parameters/handlers/parameter_ssm_get_parameters_by_name.py
new file mode 100644
index 00000000000..948fad2aa12
--- /dev/null
+++ b/tests/e2e/parameters/handlers/parameter_ssm_get_parameters_by_name.py
@@ -0,0 +1,15 @@
+import json
+import os
+from typing import Any, Dict, List, cast
+
+from aws_lambda_powertools.utilities.parameters.ssm import get_parameters_by_name
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+parameters_list: List[str] = cast(List, json.loads(os.getenv("parameters", "[]")))
+
+
+def lambda_handler(event: dict, context: LambdaContext) -> Dict[str, Any]:
+ parameters_to_fetch: Dict[str, Any] = {param: {} for param in parameters_list}
+
+    # response format: `{parameter_name: value}` for every fetched parameter
+ return get_parameters_by_name(parameters=parameters_to_fetch, max_age=0)
diff --git a/tests/e2e/parameters/infrastructure.py b/tests/e2e/parameters/infrastructure.py
index d0fb1b6c60c..e2cd5101ba7 100644
--- a/tests/e2e/parameters/infrastructure.py
+++ b/tests/e2e/parameters/infrastructure.py
@@ -1,18 +1,38 @@
-from pyclbr import Function
+import json
+from typing import List
-from aws_cdk import CfnOutput
+from aws_cdk import CfnOutput, Duration
from aws_cdk import aws_appconfig as appconfig
from aws_cdk import aws_iam as iam
+from aws_cdk import aws_ssm as ssm
+from aws_cdk.aws_lambda import Function
-from tests.e2e.utils.data_builder import build_service_name
+from tests.e2e.utils.data_builder import build_random_value, build_service_name
from tests.e2e.utils.infrastructure import BaseInfrastructure
class ParametersStack(BaseInfrastructure):
def create_resources(self):
- functions = self.create_lambda_functions()
+ parameters = self._create_ssm_parameters()
+
+ env_vars = {"parameters": json.dumps(parameters)}
+ functions = self.create_lambda_functions(
+ function_props={"environment": env_vars, "timeout": Duration.seconds(30)}
+ )
+
self._create_app_config(function=functions["ParameterAppconfigFreeformHandler"])
+ # NOTE: Enforce least-privilege for our param tests only
+ functions["ParameterSsmGetParametersByName"].add_to_role_policy(
+ iam.PolicyStatement(
+ effect=iam.Effect.ALLOW,
+ actions=[
+ "ssm:GetParameter",
+ ],
+ resources=[f"arn:aws:ssm:{self.region}:{self.account_id}:parameter/powertools/e2e/parameters/*"],
+ )
+ )
+
def _create_app_config(self, function: Function):
service_name = build_service_name()
@@ -106,3 +126,16 @@ def _create_app_config_freeform(
resources=["*"],
)
)
+
+ def _create_ssm_parameters(self) -> List[str]:
+ parameters: List[str] = []
+
+ for _ in range(10):
+ param = f"/powertools/e2e/parameters/{build_random_value()}"
+ rand = build_random_value()
+ ssm.StringParameter(self.stack, f"param-{rand}", parameter_name=param, string_value=rand)
+ parameters.append(param)
+
+ CfnOutput(self.stack, "ParametersNameList", value=json.dumps(parameters))
+
+ return parameters
diff --git a/tests/e2e/parameters/test_ssm.py b/tests/e2e/parameters/test_ssm.py
new file mode 100644
index 00000000000..7e9614f8ea0
--- /dev/null
+++ b/tests/e2e/parameters/test_ssm.py
@@ -0,0 +1,34 @@
+import json
+from typing import Any, Dict, List
+
+import pytest
+
+from tests.e2e.utils import data_fetcher
+
+
+@pytest.fixture
+def ssm_get_parameters_by_name_fn_arn(infrastructure: dict) -> str:
+ return infrastructure.get("ParameterSsmGetParametersByNameArn", "")
+
+
+@pytest.fixture
+def parameters_list(infrastructure: dict) -> List[str]:
+ param_list = infrastructure.get("ParametersNameList", "[]")
+ return json.loads(param_list)
+
+
+#
+def test_get_parameters_by_name(
+ ssm_get_parameters_by_name_fn_arn: str,
+    parameters_list: List[str],
+):
+ # GIVEN/WHEN
+ function_response, _ = data_fetcher.get_lambda_response(lambda_arn=ssm_get_parameters_by_name_fn_arn)
+ parameter_values: Dict[str, Any] = json.loads(function_response["Payload"].read().decode("utf-8"))
+
+ # THEN
+ for param in parameters_list:
+ try:
+ assert parameter_values[param] is not None
+ except (KeyError, TypeError):
+ pytest.fail(f"Parameter {param} not found in response")
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 96dd3b41b25..c45c138ad59 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -7,13 +7,17 @@
from aws_lambda_powertools import Metrics, single_metric
from aws_lambda_powertools.metrics import (
+ EphemeralMetrics,
MetricUnit,
MetricUnitError,
MetricValueError,
SchemaValidationError,
)
-from aws_lambda_powertools.metrics import metrics as metrics_global
-from aws_lambda_powertools.metrics.base import MAX_DIMENSIONS, MetricManager
+from aws_lambda_powertools.metrics.base import (
+ MAX_DIMENSIONS,
+ MetricManager,
+ reset_cold_start_flag,
+)
@pytest.fixture(scope="function", autouse=True)
@@ -21,7 +25,7 @@ def reset_metric_set():
metrics = Metrics()
metrics.clear_metrics()
metrics.clear_default_dimensions()
- metrics_global.is_cold_start = True # ensure each test has cold start
+ reset_cold_start_flag() # ensure each test has cold start
yield
@@ -209,6 +213,29 @@ def test_service_env_var(monkeypatch, capsys, metric, namespace):
assert expected == output
+def test_service_env_var_with_metrics_instance(monkeypatch, capsys, metric, namespace, service):
+ # GIVEN we use POWERTOOLS_SERVICE_NAME
+ monkeypatch.setenv("POWERTOOLS_SERVICE_NAME", service)
+
+ # WHEN initializing Metrics without an explicit service name
+ metrics = Metrics(namespace=namespace)
+ metrics.add_metric(**metric)
+
+ @metrics.log_metrics
+ def lambda_handler(_, __):
+ pass
+
+ lambda_handler({}, {})
+
+ output = capture_metrics_output(capsys)
+ expected_dimension = {"name": "service", "value": service}
+ expected = serialize_single_metric(metric=metric, dimension=expected_dimension, namespace=namespace)
+
+ # THEN a metric should be logged using the implicitly created "service" dimension
+ remove_timestamp(metrics=[output, expected])
+ assert expected == output
+
+
def test_metrics_spillover(monkeypatch, capsys, metric, dimension, namespace, a_hundred_metrics):
# GIVEN Metrics is initialized and we have over a hundred metrics to add
my_metrics = Metrics(namespace=namespace)
@@ -925,3 +952,61 @@ def test_metrics_reuse_metadata_set(metric, dimension, namespace):
# THEN both class instances should have the same metadata set
assert my_metrics_2.metadata_set == my_metrics.metadata_set
+
+
+def test_ephemeral_metrics_isolates_data_set(metric, dimension, namespace, metadata):
+ # GIVEN two EphemeralMetrics instances are initialized
+ my_metrics = EphemeralMetrics(namespace=namespace)
+ isolated_metrics = EphemeralMetrics(namespace=namespace)
+
+ # WHEN metrics, dimensions and metadata are added to the first instance
+ my_metrics.add_dimension(**dimension)
+ my_metrics.add_metric(**metric)
+ my_metrics.add_metadata(**metadata)
+
+ # THEN the non-singleton instance should not have them
+ assert my_metrics.metric_set != isolated_metrics.metric_set
+ assert my_metrics.metadata_set != isolated_metrics.metadata_set
+ assert my_metrics.dimension_set != isolated_metrics.dimension_set
+
+
+def test_ephemeral_metrics_combined_with_metrics(metric, dimension, namespace, metadata):
+ # GIVEN Metrics and EphemeralMetrics instances are initialized
+ my_metrics = Metrics(namespace=namespace)
+ isolated_metrics = EphemeralMetrics(namespace=namespace)
+
+ # WHEN metrics, dimensions and metadata are added to the first instance
+ my_metrics.add_dimension(**dimension)
+ my_metrics.add_metric(**metric)
+ my_metrics.add_metadata(**metadata)
+
+ # THEN EphemeralMetrics instance should not have them
+ assert my_metrics.metric_set != isolated_metrics.metric_set
+ assert my_metrics.metadata_set != isolated_metrics.metadata_set
+ assert my_metrics.dimension_set != isolated_metrics.dimension_set
+
+
+def test_ephemeral_metrics_nested_log_metrics(metric, dimension, namespace, metadata, capsys):
+ # GIVEN two distinct Metrics are initialized
+ my_metrics = Metrics(namespace=namespace)
+ isolated_metrics = EphemeralMetrics(namespace=namespace)
+
+ my_metrics.add_metric(**metric)
+ my_metrics.add_dimension(**dimension)
+ my_metrics.add_metadata(**metadata)
+
+ isolated_metrics.add_metric(**metric)
+ isolated_metrics.add_dimension(**dimension)
+ isolated_metrics.add_metadata(**metadata)
+
+ # WHEN we nest log_metrics to serialize
+ # and flush all metrics at the end of a function execution
+ @isolated_metrics.log_metrics
+ @my_metrics.log_metrics
+ def lambda_handler(evt, ctx):
+ pass
+
+ lambda_handler({}, {})
+
+ output = capture_metrics_output_multiple_emf_objects(capsys)
+ assert len(output) == 2
diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py
index 123c2fdbcc2..c5e65c158be 100644
--- a/tests/functional/test_utilities_parameters.py
+++ b/tests/functional/test_utilities_parameters.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
import base64
import json
import random
import string
from datetime import datetime, timedelta
from io import BytesIO
-from typing import Dict
+from typing import Any, Dict, List, Tuple
import boto3
import pytest
@@ -14,7 +16,12 @@
from botocore.response import StreamingBody
from aws_lambda_powertools.utilities import parameters
-from aws_lambda_powertools.utilities.parameters.base import BaseProvider, ExpirableValue
+from aws_lambda_powertools.utilities.parameters.base import (
+ TRANSFORM_METHOD_MAPPING,
+ BaseProvider,
+ ExpirableValue,
+)
+from aws_lambda_powertools.utilities.parameters.ssm import SSMProvider
@pytest.fixture(scope="function")
@@ -39,6 +46,29 @@ def config():
return Config(region_name="us-east-1")
+def build_get_parameters_stub(params: Dict[str, Any], invalid_parameters: List[str] | None = None) -> Dict[str, List]:
+ invalid_parameters = invalid_parameters or []
+ version = random.randrange(1, 1000)
+ return {
+ "Parameters": [
+ {
+ "Name": param,
+ "Type": "String",
+ "Value": value,
+ "Version": version,
+ "Selector": f"{param}:{version}",
+ "SourceResult": "string",
+ "LastModifiedDate": datetime(2015, 1, 1),
+ "ARN": f"arn:aws:ssm:us-east-2:111122223333:parameter/{param.lstrip('/')}",
+ "DataType": "string",
+ }
+ for param, value in params.items()
+ if param not in invalid_parameters
+ ],
+ "InvalidParameters": invalid_parameters, # official SDK stub fails validation here, need to raise an issue
+ }
+
+
def test_dynamodb_provider_get(mock_name, mock_value, config):
"""
Test DynamoDBProvider.get() with a non-cached value
@@ -610,6 +640,169 @@ def test_ssm_provider_clear_cache(mock_name, mock_value, config):
assert provider.store == {}
+def test_ssm_provider_get_parameters_by_name_raise_on_failure(mock_name, mock_value, config):
+ # GIVEN two parameters are requested
+ provider = parameters.SSMProvider(config=config)
+ success = f"/dev/{mock_name}"
+ fail = f"/prod/{mock_name}"
+
+ params = {success: {}, fail: {}}
+ param_names = list(params.keys())
+ stub_params = {success: mock_value}
+
+ expected_stub_response = build_get_parameters_stub(params=stub_params, invalid_parameters=[fail])
+ expected_stub_params = {"Names": param_names}
+
+ stubber = stub.Stubber(provider.client)
+ stubber.add_response("get_parameters", expected_stub_response, expected_stub_params)
+ stubber.activate()
+
+ # WHEN one of them fails to be retrieved
+ # THEN raise GetParameterError
+ with pytest.raises(parameters.exceptions.GetParameterError, match=f"Failed to fetch parameters: .*{fail}.*"):
+ try:
+ provider.get_parameters_by_name(parameters=params)
+ stubber.assert_no_pending_responses()
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_get_parameters_by_name_do_not_raise_on_failure(mock_name, mock_value, config):
+ # GIVEN two parameters are requested
+ success = f"/dev/{mock_name}"
+ fail = f"/prod/{mock_name}"
+ params = {success: {}, fail: {}}
+ param_names = list(params.keys())
+ stub_params = {success: mock_value}
+
+ expected_stub_response = build_get_parameters_stub(params=stub_params, invalid_parameters=[fail])
+ expected_stub_params = {"Names": param_names}
+
+ provider = parameters.SSMProvider(config=config)
+ stubber = stub.Stubber(provider.client)
+ stubber.add_response("get_parameters", expected_stub_response, expected_stub_params)
+ stubber.activate()
+
+ # WHEN one of them fails to be retrieved
+ try:
+ ret = provider.get_parameters_by_name(parameters=params, raise_on_error=False)
+
+ # THEN there should be no error raised
+ # and failed ones available within "_errors" key
+ stubber.assert_no_pending_responses()
+ assert ret["_errors"]
+ assert len(ret["_errors"]) == 1
+ assert fail not in ret
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_get_parameters_by_name_do_not_raise_on_failure_with_decrypt(mock_name, config):
+ # GIVEN one parameter requires decryption and an arbitrary SDK error occurs
+ param = f"/{mock_name}"
+ params = {param: {"decrypt": True}}
+
+ provider = parameters.SSMProvider(config=config)
+ stubber = stub.Stubber(provider.client)
+ stubber.add_client_error("get_parameters", "InvalidKeyId")
+ stubber.activate()
+
+ # WHEN fail-fast is disabled in get_parameters_by_name
+ try:
+ ret = provider.get_parameters_by_name(parameters=params, raise_on_error=False)
+ stubber.assert_no_pending_responses()
+
+ # THEN there should be no error raised but added under `_errors` key
+ assert ret["_errors"]
+ assert len(ret["_errors"]) == 1
+ assert param not in ret
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_get_parameters_by_name_do_not_raise_on_failure_batch_decrypt_combined(
+ mock_value, mock_version, config
+):
+ # GIVEN three parameters are requested
+ # one requires decryption, two can be batched
+ # and an arbitrary SDK error is injected
+ fail = "/fail"
+ success = "/success"
+ decrypt_fail = "/fail/decrypt"
+ params = {decrypt_fail: {"decrypt": True}, success: {}, fail: {}}
+
+ expected_stub_params = {"Names": [success, fail]}
+ expected_stub_response = build_get_parameters_stub(
+ params={fail: mock_value, success: mock_value}, invalid_parameters=[fail]
+ )
+
+ provider = parameters.SSMProvider(config=config)
+ stubber = stub.Stubber(provider.client)
+ stubber.add_client_error("get_parameter")
+ stubber.add_response("get_parameters", expected_stub_response, expected_stub_params)
+ stubber.activate()
+
+ # WHEN fail-fast is disabled in get_parameters_by_name
+ # and only one parameter succeeds out of three
+ try:
+ ret = provider.get_parameters_by_name(parameters=params, raise_on_error=False)
+
+ # THEN there should be no error raised
+ # successful params returned accordingly
+ # and failed ones available within "_errors" key
+ stubber.assert_no_pending_responses()
+ assert success in ret
+ assert ret["_errors"]
+ assert len(ret["_errors"]) == 2
+ assert fail not in ret
+ assert decrypt_fail not in ret
+ finally:
+ stubber.deactivate()
+
+
+def test_ssm_provider_get_parameters_by_name_raise_on_reserved_errors_key(mock_name, mock_value, config):
+ # GIVEN one of the parameters is named `_errors`
+ success = f"/dev/{mock_name}"
+ fail = "_errors"
+
+ params = {success: {}, fail: {}}
+ provider = parameters.SSMProvider(config=config)
+
+ # WHEN using get_parameters_by_name to fetch
+ # THEN raise GetParameterError
+ with pytest.raises(parameters.exceptions.GetParameterError, match="You cannot fetch a parameter named"):
+ provider.get_parameters_by_name(parameters=params, raise_on_error=False)
+
+
+def test_ssm_provider_get_parameters_by_name_all_decrypt_should_use_get_parameters_api(mock_name, mock_value, config):
+ # GIVEN all parameters require decryption
+ param_a = f"/a/{mock_name}"
+ param_b = f"/b/{mock_name}"
+ fail = "/does_not_exist" # stub model doesn't support all-success yet
+
+ all_params = {param_a: {}, param_b: {}, fail: {}}
+ all_params_names = list(all_params.keys())
+
+ expected_param_values = {param_a: mock_value, param_b: mock_value}
+ expected_stub_response = build_get_parameters_stub(params=expected_param_values, invalid_parameters=[fail])
+ expected_stub_params = {"Names": all_params_names, "WithDecryption": True}
+
+ provider = parameters.SSMProvider(config=config)
+ stubber = stub.Stubber(provider.client)
+ stubber.add_response("get_parameters", expected_stub_response, expected_stub_params)
+ stubber.activate()
+
+ # WHEN get_parameters_by_name is called
+ # THEN we should only use GetParameters WithDecryption=true to prevent throttling
+ try:
+ ret = provider.get_parameters_by_name(parameters=all_params, decrypt=True, raise_on_error=False)
+ stubber.assert_no_pending_responses()
+
+ assert ret is not None
+ finally:
+ stubber.deactivate()
+
+
def test_dynamodb_provider_clear_cache(mock_name, mock_value, config):
# GIVEN a provider is initialized with a cached value
provider = parameters.DynamoDBProvider(table_name="test", config=config)
@@ -1518,6 +1711,167 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
assert value == mock_value
+def test_get_parameters_by_name(monkeypatch, mock_name, mock_value, config):
+ params = {mock_name: {}}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ def get_parameters_by_name(self, *args, **kwargs) -> Dict[str, str] | Dict[str, bytes] | Dict[str, dict]:
+ return {mock_name: mock_value}
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ values = parameters.get_parameters_by_name(parameters=params)
+
+ assert len(values) == 1
+ assert values[mock_name] == mock_value
+
+
+def test_get_parameters_by_name_with_decrypt_override(monkeypatch, mock_name, mock_value, config):
+ # GIVEN 2 out of 3 parameters have decrypt override
+ decrypt_param = "/api_key"
+ decrypt_param_two = "/another/secret"
+ decrypt_params = {decrypt_param: {"decrypt": True}, decrypt_param_two: {"decrypt": True}}
+ decrypted_response = "decrypted"
+ params = {mock_name: {}, **decrypt_params}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ def _get(self, name: str, decrypt: bool = False, **sdk_options) -> str:
+            # THEN params with `decrypt` override should use `GetParameter` (`_get`)
+ assert name in decrypt_params
+ assert decrypt
+ return decrypted_response
+
+ def _get_parameters_by_name(self, *args, **kwargs) -> Tuple[Dict[str, Any], List[str]]:
+ return {mock_name: mock_value}, []
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ # WHEN get_parameters_by_name is called
+ values = parameters.get_parameters_by_name(parameters=params)
+
+ # THEN all parameters should be merged in the response
+ assert len(values) == 3
+ assert values[mock_name] == mock_value
+ assert values[decrypt_param] == decrypted_response
+ assert values[decrypt_param_two] == decrypted_response
+
+
+def test_get_parameters_by_name_with_override_and_explicit_global(monkeypatch, mock_name, mock_value, config):
+ # GIVEN a parameter overrides a default setting
+ default_cache_period = 500
+ params = {mock_name: {"max_age": 0}, "no-override": {}}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ # NOTE: By convention, we check at `_get_parameters_by_name`
+ # as that's right before we call SSM, and when options have been merged
+ # def _get_parameters_by_name(self, parameters: Dict[str, Dict], raise_on_error: bool = True) -> Dict[str, Any]:
+ def _get_parameters_by_name(
+ self, parameters: Dict[str, Dict], raise_on_error: bool = True, decrypt: bool = False
+ ) -> Tuple[Dict[str, Any], List[str]]:
+ # THEN max_age should use no_cache_param override
+ assert parameters[mock_name]["max_age"] == 0
+ assert parameters["no-override"]["max_age"] == default_cache_period
+
+ return {mock_name: mock_value}, []
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ # WHEN get_parameters_by_name is called with max_age set to 500 as the default
+ parameters.get_parameters_by_name(parameters=params, max_age=default_cache_period)
+
+
+def test_get_parameters_by_name_with_max_batch(monkeypatch, config):
+ # GIVEN a batch of 20 parameters
+ params = {f"param_{i}": {} for i in range(20)}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ def _get_parameters_by_name(
+ self, parameters: Dict[str, Dict], raise_on_error: bool = True, decrypt: bool = False
+ ) -> Tuple[Dict[str, Any], List[str]]:
+ # THEN we should always split to respect GetParameters max
+ assert len(parameters) == self._MAX_GET_PARAMETERS_ITEM
+ return {}, []
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ # WHEN get_parameters_by_name is called
+ parameters.get_parameters_by_name(parameters=params)
+
+
+def test_get_parameters_by_name_cache(monkeypatch, mock_name, mock_value, config):
+    # GIVEN we have a parameter to fetch but it is already in cache
+ params = {mock_name: {}}
+ cache_key = (mock_name, None)
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ def _get_parameters_by_name(self, *args, **kwargs) -> Tuple[Dict[str, Any], List[str]]:
+ raise RuntimeError("Should not be called if it's in cache")
+
+ provider = TestProvider()
+ provider.add_to_cache(key=(mock_name, None), value=mock_value, max_age=10)
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", provider)
+
+ # WHEN get_parameters_by_name is called
+ provider.get_parameters_by_name(parameters=params)
+
+ # THEN the cache should be used and _get_parameters_by_name should not be called
+ assert provider.has_not_expired_in_cache(key=cache_key)
+
+
+def test_get_parameters_by_name_empty_batch(monkeypatch, config):
+ # GIVEN we have an empty dictionary
+ params = {}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", TestProvider())
+
+ # WHEN get_parameters_by_name is called
+ # THEN it should return an empty response
+ assert parameters.get_parameters_by_name(parameters=params) == {}
+
+
+def test_get_parameters_by_name_cache_them_individually_not_batch(monkeypatch, mock_name, mock_value):
+    # GIVEN we have two parameters to fetch that are not yet in cache
+ dev_param = f"/dev/{mock_name}"
+ prod_param = f"/prod/{mock_name}"
+ params = {dev_param: {}, prod_param: {}}
+
+ stub_params = {dev_param: mock_value, prod_param: mock_value}
+ stub_response = build_get_parameters_stub(params=stub_params)
+
+ class FakeClient:
+ def get_parameters(self, *args, **kwargs):
+ return stub_response
+
+ provider = SSMProvider(boto3_client=FakeClient())
+ monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "ssm", provider)
+
+ # WHEN get_parameters_by_name is called
+ provider.get_parameters_by_name(parameters=params)
+
+ # THEN the cache should be populated with each parameter
+ assert len(provider.store) == len(params)
+
+
def test_get_parameter_new(monkeypatch, mock_name, mock_value):
"""
Test get_parameter() without a default provider
@@ -1584,6 +1938,27 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
assert value == mock_value
+def test_get_parameters_by_name_new(monkeypatch, mock_name, mock_value, config):
+ """
+ Test get_parameters_by_name() without a default provider
+ """
+ params = {mock_name: {}}
+
+ class TestProvider(SSMProvider):
+ def __init__(self, config: Config = config, **kwargs):
+ super().__init__(config, **kwargs)
+
+ def get_parameters_by_name(self, *args, **kwargs) -> Dict[str, str] | Dict[str, bytes] | Dict[str, dict]:
+ return {mock_name: mock_value}
+
+ monkeypatch.setattr(parameters.ssm, "DEFAULT_PROVIDERS", {})
+ monkeypatch.setattr(parameters.ssm, "SSMProvider", TestProvider)
+
+ value = parameters.get_parameters_by_name(params)
+
+ assert value[mock_name] == mock_value
+
+
def test_get_secret(monkeypatch, mock_name, mock_value):
"""
Test get_secret()
@@ -1810,6 +2185,50 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
assert value == mock_value
+def test_transform_value_auto(mock_value: str):
+ # GIVEN
+ json_data = json.dumps({"A": mock_value})
+ mock_binary = mock_value.encode()
+ binary_data = base64.b64encode(mock_binary).decode()
+
+ # WHEN
+ json_value = parameters.base.transform_value(key="/a.json", value=json_data, transform="auto")
+ binary_value = parameters.base.transform_value(key="/a.binary", value=binary_data, transform="auto")
+
+ # THEN
+ assert isinstance(json_value, dict)
+ assert isinstance(binary_value, bytes)
+ assert json_value["A"] == mock_value
+ assert binary_value == mock_binary
+
+
+def test_transform_value_auto_incorrect_key(mock_value: str):
+ # GIVEN
+ mock_key = "/missing/json/suffix"
+ json_data = json.dumps({"A": mock_value})
+
+ # WHEN
+ value = parameters.base.transform_value(key=mock_key, value=json_data, transform="auto")
+
+ # THEN it should echo back its value
+ assert isinstance(value, str)
+ assert value == json_data
+
+
+def test_transform_value_auto_unsupported_transform(mock_value: str):
+ # GIVEN
+ mock_key = "/a.does_not_exist"
+ mock_dict = {"hello": "world"}
+
+ # WHEN
+ value = parameters.base.transform_value(key=mock_key, value=mock_value, transform="auto")
+ dict_value = parameters.base.transform_value(key=mock_key, value=mock_dict, transform="auto")
+
+ # THEN it should echo back its value
+ assert value == mock_value
+ assert dict_value == mock_dict
+
+
def test_transform_value_json(mock_value):
"""
Test transform_value() with a json transform
@@ -1863,17 +2282,6 @@ def test_transform_value_binary_exception():
assert "Incorrect padding" in str(excinfo)
-def test_transform_value_wrong(mock_value):
- """
- Test transform_value() with an incorrect transform
- """
-
- with pytest.raises(parameters.TransformParameterError) as excinfo:
- parameters.base.transform_value(mock_value, "INCORRECT")
-
- assert "Invalid transform type" in str(excinfo)
-
-
def test_transform_value_ignore_error(mock_value):
"""
Test transform_value() does not raise errors when raise_on_transform_error is False
@@ -1884,16 +2292,6 @@ def test_transform_value_ignore_error(mock_value):
assert value is None
-@pytest.mark.parametrize("original_transform", ["json", "binary", "other", "Auto", None])
-def test_get_transform_method_preserve_original(original_transform):
- """
- Check if original transform method is returned for anything other than "auto"
- """
- transform = parameters.base.get_transform_method("key", original_transform)
-
- assert transform == original_transform
-
-
@pytest.mark.parametrize("extension", ["json", "binary"])
def test_get_transform_method_preserve_auto(extension, mock_name):
"""
@@ -1901,18 +2299,7 @@ def test_get_transform_method_preserve_auto(extension, mock_name):
"""
transform = parameters.base.get_transform_method(f"{mock_name}.{extension}", "auto")
- assert transform == extension
-
-
-@pytest.mark.parametrize("key", ["json", "binary", "example", "example.jsonp"])
-def test_get_transform_method_preserve_auto_unhandled(key):
- """
- Check if any key that does not end with a supported extension returns None when
- using the transform="auto"
- """
- transform = parameters.base.get_transform_method(key, "auto")
-
- assert transform is None
+ assert transform == TRANSFORM_METHOD_MAPPING[extension]
def test_base_provider_get_multiple_force_update(mock_name, mock_value):
@@ -1958,3 +2345,18 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]:
assert isinstance(value, str)
assert value == mock_value
+
+
+def test_cache_ignores_max_age_zero_or_negative(mock_value, config):
+ # GIVEN we have two parameters that shouldn't be cached
+ param = "/no_cache"
+ provider = SSMProvider(config=config)
+ cache_key = (param, None)
+
+ # WHEN a provider adds them into the cache
+ provider.add_to_cache(key=cache_key, value=mock_value, max_age=0)
+ provider.add_to_cache(key=cache_key, value=mock_value, max_age=-10)
+
+ # THEN they should not be added to the cache
+ assert len(provider.store) == 0
+ assert provider.has_not_expired_in_cache(cache_key) is False