diff --git a/.markdownlintignore b/.markdownlintignore
new file mode 100644
index 00000000000..11b6d7ffe29
--- /dev/null
+++ b/.markdownlintignore
@@ -0,0 +1,2 @@
+docs/core/metrics/index.md
+includes/abbreviations.md
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index 900e0da7dd7..cb970fcfdc0 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -51,6 +51,8 @@ def lambda_handler():
         service name to be used as metric dimension, by default "service_undefined"
     namespace : str, optional
         Namespace for metrics
+    provider: AmazonCloudWatchEMFProvider, optional
+        Pre-configured AmazonCloudWatchEMFProvider instance
 
     Raises
     ------
diff --git a/aws_lambda_powertools/metrics/provider/base.py b/aws_lambda_powertools/metrics/provider/base.py
index 8bd2440658a..702b4b3d2ba 100644
--- a/aws_lambda_powertools/metrics/provider/base.py
+++ b/aws_lambda_powertools/metrics/provider/base.py
@@ -179,8 +179,13 @@ def handler(event, context):
         e
             Propagate error received
         """
+        extra_args = {}
 
-        default_dimensions = kwargs.get("default_dimensions")
+        if kwargs.get("default_dimensions"):
+            extra_args.update({"default_dimensions": kwargs.get("default_dimensions")})
+
+        if kwargs.get("default_tags"):
+            extra_args.update({"default_tags": kwargs.get("default_tags")})
 
         # If handler is None we've been called with parameters
         # Return a partial function with args filled
@@ -190,7 +195,7 @@ def handler(event, context):
                 self.log_metrics,
                 capture_cold_start_metric=capture_cold_start_metric,
                 raise_on_empty_metrics=raise_on_empty_metrics,
-                default_dimensions=default_dimensions,
+                **extra_args,
             )
 
         @functools.wraps(lambda_handler)
diff --git a/aws_lambda_powertools/metrics/provider/datadog/__init__.py b/aws_lambda_powertools/metrics/provider/datadog/__init__.py
new file mode 100644
index 00000000000..23cb35d31eb
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/__init__.py
@@ -0,0 +1,7 @@
+from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider
+from aws_lambda_powertools.metrics.provider.datadog.metrics import DatadogMetrics
+
+__all__ = [
+    "DatadogMetrics",
+    "DatadogProvider",
+]
diff --git a/aws_lambda_powertools/metrics/provider/datadog/datadog.py b/aws_lambda_powertools/metrics/provider/datadog/datadog.py
new file mode 100644
index 00000000000..6195589cd1b
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/datadog.py
@@ -0,0 +1,391 @@
+from __future__ import annotations
+
+import json
+import logging
+import numbers
+import os
+import re
+import time
+import warnings
+from typing import Any, Callable, Dict, List, Optional
+
+from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError
+from aws_lambda_powertools.metrics.provider import BaseProvider
+from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning
+from aws_lambda_powertools.shared import constants
+from aws_lambda_powertools.shared.functions import resolve_env_var_choice
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+METRIC_NAME_REGEX = re.compile(r"^[a-zA-Z0-9_.]+$")
+
+logger = logging.getLogger(__name__)
+
+# Check if using datadog layer
+try:
+    from datadog_lambda.metric import lambda_metric  # type: ignore
+except ImportError:  # pragma: no cover
+    lambda_metric = None  # pragma: no cover
+
+DEFAULT_NAMESPACE = "default"
+
+
+class DatadogProvider(BaseProvider):
+    """
+    DatadogProvider creates metrics asynchronously via Datadog extension or exporter.
+
+    **Use `aws_lambda_powertools.DatadogMetrics` to create and flush metrics to Datadog.**
+
+    Environment variables
+    ---------------------
+    POWERTOOLS_METRICS_NAMESPACE : str
+        metric namespace to be set for all metrics
+
+    Raises
+    ------
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails Datadog schema validation
+    """
+
+    def __init__(
+        self,
+        metric_set: List | None = None,
+        namespace: str | None = None,
+        flush_to_log: bool | None = None,
+        default_tags: Dict[str, Any] | None = None,
+    ):
+        self.metric_set = metric_set if metric_set is not None else []
+        self.namespace = (
+            resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
+            or DEFAULT_NAMESPACE
+        )
+        self.default_tags = default_tags or {}
+        self.flush_to_log = resolve_env_var_choice(choice=flush_to_log, env=os.getenv(constants.DATADOG_FLUSH_TO_LOG))
+
+    # adding name,value,timestamp,tags
+    def add_metric(
+        self,
+        name: str,
+        value: float,
+        timestamp: int | None = None,
+        **tags,
+    ) -> None:
+        """
+        Add a metric to the metric set, to be flushed at the end of the invocation.
+
+        Parameters
+        ----------
+        name: str
+            Name/Key for the metric
+        value: float
+            Value for the metric
+        timestamp: int, optional
+            Epoch timestamp for the metric; defaults to the current time
+        tags: Any
+            extra keyword arguments are converted into tags, e.g., add_metric(sales="sam") -> tags=['sales:sam']
+
+        Examples
+        --------
+            >>> provider = DatadogProvider()
+            >>>
+            >>> provider.add_metric(
+            >>>     name='coffee_house.order_value',
+            >>>     value=12.45,
+            >>>     product='latte',
+            >>>     order='online',
+            >>> )
+        """
+
+        # validating metric name
+        if not self._validate_datadog_metric_name(name):
+            docs = "https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics"
+            raise SchemaValidationError(
+                f"Invalid metric name. Please ensure the metric {name} follows the requirements. \n"
+                f"See Datadog documentation here: \n {docs}",
+            )
+
+        # validating metric tags
+        self._validate_datadog_tags_name(tags)
+
+        if not isinstance(value, numbers.Real):
+            raise MetricValueError(f"{value} is not a valid number")
+
+        if not timestamp:
+            timestamp = int(time.time())
+
+        logger.debug({"details": "Appending metric", "metrics": name})
+        self.metric_set.append({"m": name, "v": value, "e": timestamp, "t": tags})
+
+    def serialize_metric_set(self, metrics: List | None = None) -> List:
+        """Serializes metrics
+
+        Example
+        -------
+        **Serialize metrics into Datadog format**
+
+            metrics = DatadogMetrics()
+
+            # ...add metrics, tags, namespace
+
+            ret = metrics.serialize_metric_set()
+
+        Returns
+        -------
+        List
+            Serialized metrics following Datadog specification
+
+        Raises
+        ------
+        SchemaValidationError
+            Raised when serialization fails schema validation
+        """
+
+        if metrics is None:  # pragma: no cover
+            metrics = self.metric_set
+
+        if len(metrics) == 0:
+            raise SchemaValidationError("Must contain at least one metric.")
+
+        output_list: List = []
+
+        logger.debug({"details": "Serializing metrics", "metrics": metrics})
+
+        for single_metric in metrics:
+            if self.namespace != DEFAULT_NAMESPACE:
+                metric_name = f"{self.namespace}.{single_metric['m']}"
+            else:
+                metric_name = single_metric["m"]
+
+            output_list.append(
+                {
+                    "m": metric_name,
+                    "v": single_metric["v"],
+                    "e": single_metric["e"],
+                    "t": self._serialize_datadog_tags(metric_tags=single_metric["t"], default_tags=self.default_tags),
+                },
+            )
+
+        return output_list
+    # flush serialized data to output
+    def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
+        """Manually flushes the metrics. This is normally not necessary,
+        unless you're running on other runtimes besides Lambda, where the @log_metrics
+        decorator already handles things for you.
+
+        Parameters
+        ----------
+        raise_on_empty_metrics : bool, optional
+            raise exception if no metrics are emitted, by default False
+        """
+        if not raise_on_empty_metrics and len(self.metric_set) == 0:
+            warnings.warn(
+                "No application metrics to publish. The cold-start metric may be published if enabled. "
+                "If application metrics should never be empty, consider using 'raise_on_empty_metrics'",
+                stacklevel=2,
+            )
+
+        else:
+            logger.debug("Flushing existing metrics")
+            metrics = self.serialize_metric_set()
+            # submit through datadog extension
+            if lambda_metric and not self.flush_to_log:
+                # use lambda_metric function from datadog package, submit metrics to datadog
+                for metric_item in metrics:  # pragma: no cover
+                    lambda_metric(  # pragma: no cover
+                        metric_name=metric_item["m"],
+                        value=metric_item["v"],
+                        timestamp=metric_item["e"],
+                        tags=metric_item["t"],
+                    )
+            else:
+                # dd module not found: flush to log, this format can be recognized via datadog log forwarder
+                # https://github.com/Datadog/datadog-lambda-python/blob/main/datadog_lambda/metric.py#L77
+                for metric_item in metrics:
+                    print(json.dumps(metric_item, separators=(",", ":")))
+
+        self.clear_metrics()
+
+    def clear_metrics(self):
+        logger.debug("Clearing out existing metric set from memory")
+        self.metric_set.clear()
+
+    def add_cold_start_metric(self, context: LambdaContext) -> None:
+        """Add the cold start metric with a function_name tag
+
+        Parameters
+        ----------
+        context : LambdaContext
+            Lambda context
+        """
+        logger.debug("Adding cold start metric and function_name tagging")
+        self.add_metric(name="ColdStart", value=1, function_name=context.function_name)
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        **kwargs,
+    ):
+        """Decorator to serialize and publish metrics at the end of a function execution.
+
+        Be aware that log_metrics **does call** the decorated function (e.g. lambda_handler).
+
+        Example
+        -------
+        **Lambda function using tracer and metrics decorators**
+
+            from aws_lambda_powertools import Tracer
+            from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+            metrics = DatadogMetrics(namespace="powertools")
+            tracer = Tracer(service="payment")
+
+            @tracer.capture_lambda_handler
+            @metrics.log_metrics
+            def handler(event, context):
+                ...
+
+        Parameters
+        ----------
+        lambda_handler : Callable[[Any, Any], Any], optional
+            lambda function handler, by default None
+        capture_cold_start_metric : bool, optional
+            captures cold start metric, by default False
+        raise_on_empty_metrics : bool, optional
+            raise exception if no metrics are emitted, by default False
+        **kwargs
+            Arbitrary keyword arguments, e.g., default_tags
+
+        Raises
+        ------
+        e
+            Propagate error received
+        """
+
+        default_tags = kwargs.get("default_tags")
+
+        if default_tags:
+            self.set_default_tags(**default_tags)
+
+        return super().log_metrics(
+            lambda_handler=lambda_handler,
+            capture_cold_start_metric=capture_cold_start_metric,
+            raise_on_empty_metrics=raise_on_empty_metrics,
+            **kwargs,
+        )
+
+    def set_default_tags(self, **tags) -> None:
+        """Persist tags across Lambda invocations
+
+        Parameters
+        ----------
+        tags : **kwargs
+            tags as key=value
+
+        Example
+        -------
+        **Sets default tags that will always be present across metrics and invocations**
+
+            from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+            metrics = DatadogMetrics(namespace="ServerlessAirline")
+            metrics.set_default_tags(environment="demo", another="one")
+
+            @metrics.log_metrics()
+            def lambda_handler():
+                return True
+        """
+        self._validate_datadog_tags_name(tags)
+        self.default_tags.update(**tags)
+
+    @staticmethod
+    def _serialize_datadog_tags(metric_tags: Dict[str, Any], default_tags: Dict[str, Any]) -> List[str]:
+        """
+        Serialize metric tags into a list of formatted strings for Datadog integration.
+
+        This function takes a dictionary of metric-specific tags, falling back to default tags
+        when none are given, and converts them into a list of strings in the format "tag_key:tag_value".
+
+        Parameters
+        ----------
+        metric_tags: Dict[str, Any]
+            A dictionary containing metric-specific tags.
+        default_tags: Dict[str, Any]
+            A dictionary containing default tags applicable to all metrics.
+
+        Returns
+        -------
+        List[str]
+            A list of formatted tag strings, each in the "tag_key:tag_value" format.
+
+        Example
+        -------
+            >>> metric_tags = {'environment': 'production', 'service': 'web'}
+            >>> serialize_datadog_tags(metric_tags, None)
+            ['environment:production', 'service:web']
+        """
+        tags = metric_tags or default_tags
+
+        return [f"{tag_key}:{tag_value}" for tag_key, tag_value in tags.items()]
+
+    @staticmethod
+    def _validate_datadog_tags_name(tags: Dict):
+        """
+        Validate metric tags according to specific requirements.
+
+        Metric tags must start with a letter.
+        Metric tags must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective.
+
+        More information here: https://docs.datadoghq.com/getting_started/tagging/#define-tags
+
+        Parameters
+        ----------
+        tags: Dict
+            The metric tags to be validated.
+        """
+        for tag_key, tag_value in tags.items():
+            tag = f"{tag_key}:{tag_value}"
+            if not tag[0].isalpha() or len(tag) > 200:
+                docs = "https://docs.datadoghq.com/getting_started/tagging/#define-tags"
+                warnings.warn(
+                    f"Invalid tag value. Please ensure the specific tag {tag} follows the requirements. \n"
+                    f"May incur data loss for metrics. \n"
+                    f"See Datadog documentation here: \n {docs}",
+                    DatadogDataValidationWarning,
+                    stacklevel=2,
+                )
+
+    @staticmethod
+    def _validate_datadog_metric_name(metric_name: str) -> bool:
+        """
+        Validate a metric name according to specific requirements.
+
+        Metric names must start with a letter.
+        Metric names must only contain ASCII alphanumerics, underscores, and periods.
+        Other characters, including spaces, are converted to underscores.
+        Unicode is not supported.
+        Metric names must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective.
+
+        More information here: https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics
+
+        Parameters
+        ----------
+        metric_name: str
+            The metric name to be validated.
+
+        Returns
+        -------
+        bool
+            True if the metric name is valid, False otherwise.
+        """
+
+        # Check if the metric name starts with a letter
+        # Check if the metric name contains more than 200 characters
+        # Check if the resulting metric name only contains ASCII alphanumerics, underscores, and periods
+        if not metric_name[0].isalpha() or len(metric_name) > 200 or not METRIC_NAME_REGEX.match(metric_name):
+            return False
+
+        return True
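As a quick illustration of the provider above, here is a minimal sketch of its end-to-end behavior; it assumes the Datadog SDK is not installed (or `flush_to_log=True`), so `flush_metrics()` prints to standard output in the Forwarder format, with the namespace prefix applied by `serialize_metric_set`:

```python
from aws_lambda_powertools.metrics.provider.datadog import DatadogProvider

provider = DatadogProvider(namespace="ServerlessAirline", flush_to_log=True)
provider.add_metric(name="SuccessfulBooking", value=1, product="latte")

# Prints roughly: {"m":"ServerlessAirline.SuccessfulBooking","v":1,"e":<epoch>,"t":["product:latte"]}
provider.flush_metrics()
```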
diff --git a/aws_lambda_powertools/metrics/provider/datadog/metrics.py b/aws_lambda_powertools/metrics/provider/datadog/metrics.py
new file mode 100644
index 00000000000..3ee4dc2f835
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/metrics.py
@@ -0,0 +1,126 @@
+# NOTE: kept for compatibility
+from __future__ import annotations
+
+from typing import Any, Callable, Dict, List, Optional
+
+from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider
+
+
+class DatadogMetrics:
+    """
+    DatadogMetrics creates metrics asynchronously via Datadog extension or exporter.
+
+    **Use `aws_lambda_powertools.DatadogMetrics` to create and flush metrics to Datadog.**
+
+    Example
+    -------
+    **Creates a few metrics and publishes them at the end of a function execution**
+
+        from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+        metrics = DatadogMetrics(namespace="ServerlessAirline")
+
+        @metrics.log_metrics(capture_cold_start_metric=True)
+        def lambda_handler():
+            metrics.add_metric(name="item_sold", value=1, product="latte", order="online")
+            return True
+
+    Environment variables
+    ---------------------
+    POWERTOOLS_METRICS_NAMESPACE : str
+        metric namespace
+
+    Parameters
+    ----------
+    flush_to_log : bool, optional
+        Used when exporting through the Datadog Forwarder instead of the Lambda Extension
+    namespace : str, optional
+        Namespace for metrics
+    provider: DatadogProvider, optional
+        Pre-configured DatadogProvider instance
+
+    Raises
+    ------
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails Datadog schema validation
+    """
+
+    # NOTE: We use class attrs to share metrics data across instances
+    # this allows customers to initialize Metrics() throughout their code base (and middlewares)
+    # and not get caught by accident with metrics data loss, or data deduplication
+    # e.g., m1 and m2 add metric ProductCreated, however m1 has 'version' tag but m2 doesn't
+    # Result: ProductCreated is created twice as we now have 2 different metric payloads
+    _metrics: List = []
+    _default_tags: Dict[str, Any] = {}
+
+    def __init__(
+        self,
+        namespace: str | None = None,
+        flush_to_log: bool | None = None,
+        provider: DatadogProvider | None = None,
+    ):
+        self.metric_set = self._metrics
+        self.default_tags = self._default_tags
+
+        if provider is None:
+            self.provider = DatadogProvider(
+                namespace=namespace,
+                flush_to_log=flush_to_log,
+                metric_set=self.metric_set,
+            )
+        else:
+            self.provider = provider
+
+    def add_metric(
+        self,
+        name: str,
+        value: float,
+        timestamp: int | None = None,
+        **tags: Any,
+    ) -> None:
+        self.provider.add_metric(name=name, value=value, timestamp=timestamp, **tags)
+
+    def serialize_metric_set(self, metrics: List | None = None) -> List:
+        return self.provider.serialize_metric_set(metrics=metrics)
+
+    def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
+        self.provider.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics)
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        default_tags: Dict[str, Any] | None = None,
+    ):
+        return self.provider.log_metrics(
+            lambda_handler=lambda_handler,
+            capture_cold_start_metric=capture_cold_start_metric,
+            raise_on_empty_metrics=raise_on_empty_metrics,
+            default_tags=default_tags,
+        )
+
+    def set_default_tags(self, **tags) -> None:
+        self.provider.set_default_tags(**tags)
+        self.default_tags.update(**tags)
+
+    def clear_metrics(self) -> None:
+        self.provider.clear_metrics()
+
+    def clear_default_tags(self) -> None:
+        self.provider.default_tags.clear()
+        self.default_tags.clear()
+
+    # We now allow customers to bring their own instance of DatadogProvider,
+    # so we need to define a getter/setter for the namespace property
+    # to access this attribute on the provider instance.
+    @property
+    def namespace(self):
+        return self.provider.namespace
+
+    @namespace.setter
+    def namespace(self, namespace):
+        self.provider.namespace = namespace
diff --git a/aws_lambda_powertools/metrics/provider/datadog/warnings.py b/aws_lambda_powertools/metrics/provider/datadog/warnings.py
new file mode 100644
index 00000000000..accf19526e7
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/warnings.py
@@ -0,0 +1,8 @@
+class DatadogDataValidationWarning(Warning):
+    message: str
+
+    def __init__(self, message: str):
+        self.message = message
+
+    def __str__(self) -> str:
+        return self.message
diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py
index 0cde7582976..20a7fbf47d2 100644
--- a/aws_lambda_powertools/shared/constants.py
+++ b/aws_lambda_powertools/shared/constants.py
@@ -10,6 +10,8 @@
 
 METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE"
 
+DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG"
+
 SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME"
 XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID"
 LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT"
diff --git a/docs/core/metrics.md b/docs/core/metrics.md
index 2fd4cfc98d1..31b4ea99ce7 100644
--- a/docs/core/metrics.md
+++ b/docs/core/metrics.md
@@ -1,5 +1,5 @@
 ---
-title: Metrics
+title: Amazon CloudWatch EMF Metrics
 description: Core utility
 ---
 
@@ -16,7 +16,7 @@ These metrics can be visualized through [Amazon CloudWatch Console](https://cons
 
 ## Terminologies
 
-If you're new to Amazon CloudWatch, there are two terminologies you must be aware of before using this utility:
+If you're new to Amazon CloudWatch, there are five terminologies you must be aware of before using this utility:
 
 * **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`.
 * **Dimensions**. Metrics metadata in key-value format. They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`.
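A minimal sketch tying these terms to the utility's existing API (names and values are illustrative):

```python
from aws_lambda_powertools import Metrics

# Namespace groups metrics; dimensions slice them, e.g., ColdStart by service
metrics = Metrics(namespace="ServerlessEcommerce", service="payment")
metrics.add_dimension(name="environment", value="prod")
metrics.add_metric(name="SuccessfulBooking", unit="Count", value=1)
```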
@@ -197,9 +197,9 @@ This has the advantage of keeping cold start metric separate from your applicati
 
 The following environment variable is available to configure Metrics at a global scope:
 
-| Setting | Description | Environment variable | Default |
-|--------------------|------------------------------------------------------------------------------|-----------------------------------------|---------|
-| **Namespace Name** | Sets namespace used for metrics. | `POWERTOOLS_METRICS_NAMESPACE` | `None` |
+| Setting            | Description                      | Environment variable           | Default |
+| ------------------ | -------------------------------- | ------------------------------ | ------- |
+| **Namespace Name** | Sets namespace used for metrics. | `POWERTOOLS_METRICS_NAMESPACE` | `None`  |
 
 `POWERTOOLS_METRICS_NAMESPACE` is also available on a per-instance basis with the `namespace` parameter, which will consequently override the environment variable value.
 
@@ -261,7 +261,7 @@ By default it will skip all previously defined dimensions including default dime
 
 ### Flushing metrics manually
 
-If you are using the AWS Lambda Web Adapter project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data.
+If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize and print metrics to standard output, then clear in-memory metrics data.
 
 ???+ warning
     This does not capture Cold Start metrics, and metric data validation still applies.
 
@@ -286,9 +286,9 @@ You can use `EphemeralMetrics` class when looking to isolate multiple instances
 
 `EphemeralMetrics` has only one difference while keeping nearly the exact same set of features:
 
-| Feature | Metrics | EphemeralMetrics |
-| ----------------------------------------------------------------------------------------------------------- | ------- | ---------------- |
-| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - |
+| Feature                                                                | Metrics | EphemeralMetrics |
+| ---------------------------------------------------------------------- | ------- | ---------------- |
+| **Share data across instances** (metrics, dimensions, metadata, etc.)  | Yes     | -                |
 
 !!! question "Why not changing the default `Metrics` behaviour to not share data across instances?"
 
@@ -327,6 +327,20 @@ These issues are exacerbated when you create **(A)** metric dimensions condition
 
 That is why `Metrics` shares data across instances by default, as that covers 80% of use cases and different personas using Powertools. This allows them to instantiate `Metrics` in multiple places throughout their code - be a separate file, a middleware, or an abstraction that sets default dimensions.
 
+### Observability providers
+
+> An observability provider is an [AWS Lambda Partner](https://docs.aws.amazon.com/lambda/latest/dg/extensions-api-partners.html){target="_blank" rel="nofollow"} offering a platform for logging, metrics, traces, etc.
+
+We provide a thin wrapper on top of the most requested observability providers. We strive to keep their UX as close as possible to ours while keeping our value-add features.
+
+!!! tip "Missing your preferred provider? Please create a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"}."
+
+Current providers:
+
+| Provider                              | Notes                                                    |
+| ------------------------------------- | -------------------------------------------------------- |
+| [Datadog](./datadog){target="_blank"} | Uses Datadog SDK and Datadog Lambda Extension by default |
+
 ## Testing your code
 
 ### Setting environment variables
@@ -384,4 +398,4 @@ You can read standard output and assert whether metrics have been flushed. Here'
 ```
 
 ???+ tip
-    For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/test_metrics.py){target="_blank"}
+    For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_cloudwatch_emf.py){target="_blank"}
diff --git a/docs/core/metrics/datadog.md b/docs/core/metrics/datadog.md
new file mode 100644
index 00000000000..fb5927b3a63
--- /dev/null
+++ b/docs/core/metrics/datadog.md
@@ -0,0 +1,259 @@
+---
+title: Datadog
+description: Metrics provider
+---
+
+This observability provider creates custom metrics by flushing metrics to [Datadog Lambda extension](https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli){target="_blank" rel="nofollow"}, or to standard output via [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}. These metrics can be visualized in the [Datadog console](https://app.datadoghq.com/metric/explore){target="_blank" rel="nofollow"}.
+
+```mermaid
+stateDiagram-v2
+    direction LR
+    LambdaFn: Your Lambda function
+    LambdaCode: DatadogMetrics
+    DatadogSDK: Datadog SDK
+    DatadogExtension: Datadog Lambda Extension
+    Datadog: Datadog Dashboard
+    LambdaExtension: Lambda Extension
+
+    LambdaFn --> LambdaCode
+    LambdaCode --> DatadogSDK
+    DatadogSDK --> DatadogExtension
+
+    state LambdaExtension {
+        DatadogExtension --> Datadog: async
+    }
+
+```
+
+## Key features
+
+* Flush metrics to Datadog extension or standard output
+* Validate against common metric definition mistakes
+* Support for adding default tags
+
+## Terminologies
+
+If you're new to Datadog Metrics, there are three terminologies you must be aware of before using this utility:
+
+* **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`.
+* **Metric**. It's the name of the metric, for example: SuccessfulBooking or UpdatedBooking.
+* **Tags**. Metrics metadata in key-value pair format. They help provide contextual information, and filter and organize metrics.
+
+You can read more details in the [Datadog official documentation](https://docs.datadoghq.com/metrics/custom_metrics/){target="_blank" rel="nofollow"}.
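+
+A quick sketch of how these three concepts map onto the API (values are illustrative):
+
+```python
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+# Namespace: prefixed onto each metric name at serialization time
+metrics = DatadogMetrics(namespace="ServerlessEcommerce")
+
+# "SuccessfulBooking" is the metric; keyword arguments become tags
+metrics.add_metric(name="SuccessfulBooking", value=1, product="latte", order="online")
+```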
+
+## Getting started
+
+???+ tip
+    All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}.
+
+### Install
+
+> **Using Datadog Forwarder?** You can skip this step.
+
+We recommend using [Datadog SDK](https://docs.datadoghq.com/serverless/installation/python/){target="_blank" rel="nofollow"} and Datadog Lambda Extension with this feature for optimal results.
+
+For Datadog SDK, you can add `aws-lambda-powertools[datadog]` as a dependency in your preferred tool, or as a Lambda Layer in the following example:
+
+```yaml hl_lines="15-16 28 32" title="AWS Serverless Application Model (SAM) example"
+--8<-- "examples/metrics_datadog/sam/template.yaml"
+```
+
+### Creating metrics
+
+You can create metrics using `add_metric`.
+
+By default, we will generate the current timestamp for you. Alternatively, you can use the `timestamp` parameter to set a custom one in epoch time.
+
+=== "add_datadog_metrics.py"
+
+    ```python hl_lines="4 7 9"
+    --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py"
+    ```
+
+=== "add_metrics_with_timestamp.py"
+
+    ```python hl_lines="11"
+    --8<-- "examples/metrics_datadog/src/add_metrics_with_timestamp.py"
+    ```
+
+???+ warning "Warning: Do not create metrics outside the handler"
+    Metrics added in the global scope will only be added during cold start. Disregard if that's the intended behavior.
+
+### Adding tags
+
+You can add any number of tags to your metrics via keyword arguments (`key=value`). They are helpful to filter, organize, and aggregate your metrics later.
+
+!!! info "We will emit a warning for tags [beyond the 200 chars limit](https://docs.datadoghq.com/getting_started/tagging/){target="_blank" rel="nofollow"}."
+
+=== "add_metrics_with_tags.py"
+
+    ```python hl_lines="9"
+    --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py"
+    ```
+
+### Adding default tags
+
+You can persist tags across Lambda invocations and `DatadogMetrics` instances via the `set_default_tags` method, or the `default_tags` parameter in the `log_metrics` decorator.
+
+If you'd like to remove them at some point, you can use the `clear_default_tags` method.
+
+???+ note "Metric tag takes precedence over default tags of the same name"
+    When adding tags with the same name via `add_metric` and `set_default_tags`, `add_metric` takes precedence.
+
+=== "set_default_tags.py"
+
+    ```python hl_lines="5"
+    --8<-- "examples/metrics_datadog/src/set_default_tags.py"
+    ```
+
+=== "set_default_tags_log_metrics.py"
+
+    ```python hl_lines="6 9"
+    --8<-- "examples/metrics_datadog/src/set_default_tags_log_metrics.py"
+    ```
+
+### Flushing metrics
+
+Use the `log_metrics` decorator to automatically serialize and flush your metrics (SDK or Forwarder) at the end of your invocation.
+
+This decorator also ensures metrics are flushed in the event of an exception, including warning you in case you forgot to add metrics.
+
+=== "add_metrics.py"
+
+    ```python hl_lines="7"
+    --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py"
+    ```
+
+=== "log_metrics_output.json"
+
+    ```json hl_lines="2 6 7"
+    --8<-- "examples/metrics_datadog/src/log_metrics_output.json"
+    ```
+
+#### Raising SchemaValidationError on empty metrics
+
+Use `raise_on_empty_metrics=True` if you want to ensure at least one metric is always emitted.
+
+```python hl_lines="7" title="Failing fast if no metrics are added"
+--8<-- "examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py"
+```
+
+???+ tip "Suppressing warning messages on empty metrics"
+    If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No application metrics to publish*")`**.
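+
+For instance, a minimal sketch of suppressing that warning module-wide (the filter string mirrors the warning text emitted by `flush_metrics`):
+
+```python
+import warnings
+
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+# Ignore the warning emitted when the metric set is empty at flush time
+warnings.filterwarnings("ignore", "No application metrics to publish*")
+
+metrics = DatadogMetrics()
+
+
+@metrics.log_metrics
+def lambda_handler(event: dict, context: LambdaContext):
+    return  # this invocation may legitimately publish no metrics
+```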
+
+### Capturing cold start metric
+
+You can optionally capture cold start metrics with the `log_metrics` decorator via the `capture_cold_start_metric` param.
+
+=== "capture_cold_start_metric.py"
+
+    ```python hl_lines="7"
+    --8<-- "examples/metrics_datadog/src/capture_cold_start_datadog_metric.py"
+    ```
+
+=== "capture_cold_start_metric_output.json"
+
+    ```json hl_lines="2 6"
+    --8<-- "examples/metrics_datadog/src/capture_cold_start_metric_output.json"
+    ```
+
+If it's a cold start invocation, this feature will:
+
+* Create a separate Datadog metric solely containing a metric named `ColdStart`
+* Add a `function_name` metric tag
+
+This has the advantage of keeping the cold start metric separate from your application metrics, where you might have unrelated tags.
+
+???+ info
+    We do not emit 0 as a value for the ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=){target="_blank"} if you'd prefer a flag to override it.
+
+### Environment variables
+
+You can use any of the following environment variables to configure `DatadogMetrics`:
+
+| Setting              | Description                                                                  | Environment variable           | Constructor parameter |
+| -------------------- | ---------------------------------------------------------------------------- | ------------------------------ | --------------------- |
+| **Metric namespace** | Logical container where all metrics will be placed e.g. `ServerlessAirline` | `POWERTOOLS_METRICS_NAMESPACE` | `namespace`           |
+| **Flush to log**     | Use this when you want metrics to be exported through the Datadog Forwarder | `DD_FLUSH_TO_LOG`              | `flush_to_log`        |
+
+## Advanced
+
+### Flushing metrics manually
+
+If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize and print metrics to standard output, then clear in-memory metrics data.
+
+???+ warning
+    This does not capture Cold Start metrics, and metric data validation still applies.
+
+Contrary to the `log_metrics` decorator, you are now also responsible for flushing metrics in the event of an exception.
+
+```python hl_lines="17" title="Manually flushing and clearing metrics from memory"
+--8<-- "examples/metrics_datadog/src/flush_datadog_metrics.py"
+```
+
+### Integrating with Datadog Forwarder
+
+Use `flush_to_log=True` in `DatadogMetrics` to integrate with the legacy [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}.
+
+This will serialize and flush metrics to standard output.
+
+=== "flush_metrics_to_standard_output.py"
+
+    ```python hl_lines="4"
+    --8<-- "examples/metrics_datadog/src/flush_metrics_to_standard_output.py"
+    ```
+
+=== "log_metrics_standard_output.json"
+
+    ```json
+    --8<-- "examples/metrics_datadog/src/log_metrics_standard_output.json"
+    ```
+
+## Testing your code
+
+### Setting environment variables
+
+???+ tip
+    Ignore this section if:
+
+    * You are explicitly setting namespace via the `namespace` parameter
+    * You're not instantiating `DatadogMetrics` in the global namespace
+
+    For example, `DatadogMetrics(namespace="ServerlessAirline")`
+
+Make sure to set `POWERTOOLS_METRICS_NAMESPACE` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests or via pytest plugins like [dotenv](https://pypi.org/project/pytest-dotenv/){target="_blank" rel="nofollow"}.
+
+```bash title="Injecting dummy metric namespace before running tests"
+--8<-- "examples/metrics_datadog/src/run_tests_env_var.sh"
+```
+
+1. **`DD_FLUSH_TO_LOG=True`** makes it easier to test by flushing final metrics to standard output.
+
+### Clearing metrics
+
+`DatadogMetrics` keeps metrics in memory across multiple instances. If you need to test this behavior, you can use the following Pytest fixture to ensure metrics are reset, including cold start:
+
+```python title="Clearing metrics between tests"
+--8<-- "examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py"
+```
+
+### Functional testing
+
+You can read standard output and assert whether metrics have been flushed. Here's an example using `pytest` with `capsys` built-in fixture:
+
+=== "assert_single_datadog_metric.py"
+
+    ```python hl_lines="7"
+    --8<-- "examples/metrics_datadog/src/assert_single_datadog_metric.py"
+    ```
+
+=== "add_datadog_metrics.py"
+
+    ```python
+    --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py"
+    ```
+
+???+ tip
+    For more elaborate assertions and comparisons, check out [our functional testing for DatadogMetrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_datadog.py){target="_blank"}
diff --git a/docs/core/metrics/index.md b/docs/core/metrics/index.md
new file mode 100644
index 00000000000..359ce28eb33
--- /dev/null
+++ b/docs/core/metrics/index.md
@@ -0,0 +1,6 @@
+---
+title: Metrics
+description: Core utility
+---
+
+--8<-- "docs/core/metrics.md"
diff --git a/docs/index.md b/docs/index.md
index 54a0f2c58ad..4ea82dd127c 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -673,7 +673,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch
 ## Quick getting started
 
 ```bash title="Hello world example using SAM CLI"
-sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing
+sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.11 --no-tracing
 ```
 
 ## Features
diff --git a/examples/batch_processing/sam/dynamodb_batch_processing.yaml b/examples/batch_processing/sam/dynamodb_batch_processing.yaml
index 2ed70d65a86..4e436c083e5 100644
--- a/examples/batch_processing/sam/dynamodb_batch_processing.yaml
+++ b/examples/batch_processing/sam/dynamodb_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
   Function:
     Timeout: 5
     MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/examples/batch_processing/sam/kinesis_batch_processing.yaml b/examples/batch_processing/sam/kinesis_batch_processing.yaml
index 314d4f8c98f..6c80bd2f333 100644
--- a/examples/batch_processing/sam/kinesis_batch_processing.yaml
+++ b/examples/batch_processing/sam/kinesis_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
   Function:
     Timeout: 5
     MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/examples/batch_processing/sam/sqs_batch_processing.yaml b/examples/batch_processing/sam/sqs_batch_processing.yaml
index 77871c3478b..2dd827107d4 100644
--- a/examples/batch_processing/sam/sqs_batch_processing.yaml
+++ b/examples/batch_processing/sam/sqs_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
   Function:
     Timeout: 5
     MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/examples/idempotency/templates/sam.yaml b/examples/idempotency/templates/sam.yaml
index 8443a0914d7..7c2f65a6a4d 100644
--- a/examples/idempotency/templates/sam.yaml
+++ b/examples/idempotency/templates/sam.yaml
@@ -17,7 +17,7 @@ Resources:
   HelloWorldFunction:
     Type: AWS::Serverless::Function
     Properties:
-      Runtime: python3.10
+      Runtime: python3.11
       Handler: app.py
       Policies:
         - Statement:
diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml
index a72b96f32e2..ddaa2f16407 100644
--- a/examples/logger/sam/template.yaml
+++ b/examples/logger/sam/template.yaml
@@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version
 Globals:
   Function:
     Timeout: 5
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml
index 50a2964bc4b..ace4c71f2e1 100644
--- a/examples/metrics/sam/template.yaml
+++ b/examples/metrics/sam/template.yaml
@@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version
 Globals:
   Function:
     Timeout: 5
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/examples/metrics/src/clear_metrics_in_tests.py b/examples/metrics/src/clear_metrics_in_tests.py
index cea3879af83..a5462d3d9e1 100644
--- a/examples/metrics/src/clear_metrics_in_tests.py
+++ b/examples/metrics/src/clear_metrics_in_tests.py
@@ -1,7 +1,7 @@
 import pytest
 
 from aws_lambda_powertools import Metrics
-from aws_lambda_powertools.metrics import metrics as metrics_global
+from aws_lambda_powertools.metrics.provider import cold_start
 
 
 @pytest.fixture(scope="function", autouse=True)
@@ -9,6 +9,6 @@ def reset_metric_set():
     # Clear out every metric data prior to every test
     metrics = Metrics()
     metrics.clear_metrics()
-    metrics_global.is_cold_start = True  # ensure each test has cold start
+    cold_start.is_cold_start = True  # ensure each test has cold start
     metrics.clear_default_dimensions()  # remove persisted default dimensions, if any
     yield
diff --git a/examples/metrics_datadog/sam/template.yaml b/examples/metrics_datadog/sam/template.yaml
new file mode 100644
index 00000000000..39c8883c150
--- /dev/null
+++ b/examples/metrics_datadog/sam/template.yaml
@@ -0,0 +1,39 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Transform: AWS::Serverless-2016-10-31
+Description: Powertools for AWS Lambda (Python) version
+
+Globals:
+  Function:
+    Timeout: 5
+    Runtime: python3.11
+    Tracing: Active
+    Environment:
+      Variables:
+        POWERTOOLS_METRICS_NAMESPACE: ServerlessAirline
+        # [Production setup]
+        # DATADOG_API_KEY_SECRET_ARN: ""
+        # [Development only]
+        DD_API_KEY: ""
+        # Configuration details: https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli
+        DD_SITE: datadoghq.com
+
+    Layers:
+      # Find the latest Layer version in the official documentation
+      # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer
+      - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40
+
+      # Datadog SDK
+      # Latest versions: https://github.com/DataDog/datadog-lambda-python/releases
+      - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Python310:78
+
+      # Datadog Lambda Extension
+      # Latest versions: https://github.com/DataDog/datadog-lambda-extension/releases
+      - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Extension:45
+
+Resources:
+  CaptureLambdaHandlerExample:
+    Type: AWS::Serverless::Function
+    Properties:
+      CodeUri: ../src
+      Handler: capture_lambda_handler.handler
diff --git a/examples/metrics_datadog/src/add_datadog_metrics.py b/examples/metrics_datadog/src/add_datadog_metrics.py
new file mode 100644
index 00000000000..6fe6774152e
--- /dev/null
+++ b/examples/metrics_datadog/src/add_datadog_metrics.py
@@ -0,0 +1,9 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+
+@metrics.log_metrics  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1)
diff --git a/examples/metrics_datadog/src/add_metrics_with_tags.py b/examples/metrics_datadog/src/add_metrics_with_tags.py
new file mode 100644
index 00000000000..9ebb0680c13
--- /dev/null
+++ b/examples/metrics_datadog/src/add_metrics_with_tags.py
@@ -0,0 +1,9 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+
+@metrics.log_metrics  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1, tag1="powertools", tag2="python")
diff --git a/examples/metrics_datadog/src/add_metrics_with_timestamp.py b/examples/metrics_datadog/src/add_metrics_with_timestamp.py
new file mode 100644
index 00000000000..b2bef65e9ab
--- /dev/null
+++ b/examples/metrics_datadog/src/add_metrics_with_timestamp.py
@@ -0,0 +1,11 @@
+import time
+
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+
+@metrics.log_metrics  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1, timestamp=int(time.time()))
diff --git a/examples/metrics_datadog/src/assert_single_datadog_metric.py b/examples/metrics_datadog/src/assert_single_datadog_metric.py
new file mode 100644
index 00000000000..7b6ebf0909b
--- /dev/null
+++ b/examples/metrics_datadog/src/assert_single_datadog_metric.py
@@ -0,0 +1,9 @@
+import add_datadog_metrics
+
+
+def test_log_metrics(capsys):
+    add_datadog_metrics.lambda_handler({}, {})
+
+    log = capsys.readouterr().out.strip()  # remove any extra line
+
+    assert "SuccessfulBooking" in log  # basic string assertion in JSON str
diff --git a/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py
new file mode 100644
index 00000000000..ec8c2fc1e19
--- /dev/null
+++ b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py
@@ -0,0 +1,9 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+
+@metrics.log_metrics(capture_cold_start_metric=True)
+def lambda_handler(event: dict, context: LambdaContext):
+    return
diff --git a/examples/metrics_datadog/src/capture_cold_start_metric_output.json b/examples/metrics_datadog/src/capture_cold_start_metric_output.json
new file mode 100644
index 00000000000..ee7da985f66
--- /dev/null
+++ b/examples/metrics_datadog/src/capture_cold_start_metric_output.json
@@ -0,0 +1,8 @@
+{
+    "m":"ColdStart",
+    "v":1,
+    "e":1691707488,
+    "t":[
+        "function_name:HelloWorldFunction"
+    ]
+}
diff --git a/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py
new file mode 100644
index 00000000000..e80552eba83
--- /dev/null
+++ b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py
@@ -0,0 +1,13 @@
+import pytest
+
+from aws_lambda_powertools.metrics.provider import cold_start
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+
+@pytest.fixture(scope="function", autouse=True)
+def reset_metric_set():
+    # Clear out every metric data prior to every test
+    metrics = DatadogMetrics()
+    metrics.clear_metrics()
+    cold_start.is_cold_start = True  # ensure each test has cold start
+    yield
diff --git a/examples/metrics_datadog/src/flush_datadog_metrics.py b/examples/metrics_datadog/src/flush_datadog_metrics.py
new file mode 100644
index 00000000000..89e02fc2f3f
--- /dev/null
+++ b/examples/metrics_datadog/src/flush_datadog_metrics.py
@@ -0,0 +1,17 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+
+def book_flight(flight_id: str, **kwargs):
+    # logic to book flight
+    ...
+    metrics.add_metric(name="SuccessfulBooking", value=1)
+
+
+def lambda_handler(event: dict, context: LambdaContext):
+    try:
+        book_flight(flight_id=event.get("flight_id", ""))
+    finally:
+        metrics.flush_metrics()
diff --git a/examples/metrics_datadog/src/flush_metrics_to_standard_output.py b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py
new file mode 100644
index 00000000000..a58fe877925
--- /dev/null
+++ b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py
@@ -0,0 +1,9 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics(flush_to_log=True)
+
+
+@metrics.log_metrics  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1)
diff --git a/examples/metrics_datadog/src/log_metrics_output.json b/examples/metrics_datadog/src/log_metrics_output.json
new file mode 100644
index 00000000000..782cea9dc4f
--- /dev/null
+++ b/examples/metrics_datadog/src/log_metrics_output.json
@@ -0,0 +1,9 @@
+{
+    "m":"SuccessfulBooking",
+    "v":1,
+    "e":1691707076,
+    "t":[
+        "tag1:powertools",
+        "tag2:python"
+    ]
+}
diff --git a/examples/metrics_datadog/src/log_metrics_standard_output.json b/examples/metrics_datadog/src/log_metrics_standard_output.json
new file mode 100644
index 00000000000..35fcb8a096a
--- /dev/null
+++ b/examples/metrics_datadog/src/log_metrics_standard_output.json
@@ -0,0 +1,8 @@
+{
+    "m":"SuccessfulBooking",
+    "v":1,
+    "e":1691768022,
+    "t":[
+
+    ]
+}
diff --git a/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py
new file mode 100644
index 00000000000..2242b1dfe06
--- /dev/null
+++ b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py
@@ -0,0 +1,10 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
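+# raise_on_empty_metrics=True below turns the empty-metrics warning into a SchemaValidationError at flush time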
+@metrics.log_metrics(raise_on_empty_metrics=True)  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    # no metrics being created will now raise SchemaValidationError
+    return
diff --git a/examples/metrics_datadog/src/run_tests_env_var.sh b/examples/metrics_datadog/src/run_tests_env_var.sh
new file mode 100644
index 00000000000..5663afd3ba4
--- /dev/null
+++ b/examples/metrics_datadog/src/run_tests_env_var.sh
@@ -0,0 +1 @@
+POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline" DD_FLUSH_TO_LOG="True" python -m pytest # (1)!
diff --git a/examples/metrics_datadog/src/set_default_tags.py b/examples/metrics_datadog/src/set_default_tags.py
new file mode 100644
index 00000000000..94d4335b212
--- /dev/null
+++ b/examples/metrics_datadog/src/set_default_tags.py
@@ -0,0 +1,10 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+metrics.set_default_tags(tag1="powertools", tag2="python")
+
+
+@metrics.log_metrics  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1)
diff --git a/examples/metrics_datadog/src/set_default_tags_log_metrics.py b/examples/metrics_datadog/src/set_default_tags_log_metrics.py
new file mode 100644
index 00000000000..c276c1d53ff
--- /dev/null
+++ b/examples/metrics_datadog/src/set_default_tags_log_metrics.py
@@ -0,0 +1,11 @@
+from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+metrics = DatadogMetrics()
+
+default_tags = {"tag1": "powertools", "tag2": "python"}
+
+
+@metrics.log_metrics(default_tags=default_tags)  # ensures metrics are flushed upon request completion/failure
+def lambda_handler(event: dict, context: LambdaContext):
+    metrics.add_metric(name="SuccessfulBooking", value=1)
diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml
index 3eb6ef0acd0..d9e7d8a29da 100644
--- a/examples/tracer/sam/template.yaml
+++ b/examples/tracer/sam/template.yaml
@@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version
 Globals:
   Function:
     Timeout: 5
-    Runtime: python3.10
+    Runtime: python3.11
     Tracing: Active
     Environment:
       Variables:
diff --git a/includes/abbreviations.md b/includes/abbreviations.md
new file mode 100644
index 00000000000..ed52b93fe64
--- /dev/null
+++ b/includes/abbreviations.md
@@ -0,0 +1 @@
+*[observability provider]: An AWS Lambda Observability Partner
diff --git a/mkdocs.yml b/mkdocs.yml
index 49bf5a347e5..1b9f4545239 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -15,7 +15,10 @@ nav:
   - Features:
       - core/tracer.md
       - core/logger.md
-      - core/metrics.md
+      - Metrics:
+          - core/metrics/index.md
+          - Amazon CloudWatch EMF: core/metrics.md
+          - Datadog: core/metrics/datadog.md
       - Event Handler:
           - core/event_handler/api_gateway.md
           - core/event_handler/appsync.md
@@ -57,7 +60,6 @@ theme:
   features:
     - header.autohide
     - navigation.sections
-    - navigation.expand
     - navigation.top
     - navigation.instant
     - navigation.indexes
@@ -73,6 +75,7 @@ theme:
 
 markdown_extensions:
   - admonition
+  - abbr
   - pymdownx.tabbed:
      alternate_style: true
  - pymdownx.highlight:
@@ -82,6 +85,8 @@
       base_path: "."
check_paths: true restrict_base_path: false + auto_append: + - includes/abbreviations.md - meta - toc: permalink: true diff --git a/poetry.lock b/poetry.lock index d9a31ed61f7..a00522c4485 100644 --- a/poetry.lock +++ b/poetry.lock @@ -93,17 +93,17 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.89.0a0" +version = "2.91.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.89.0a0.tar.gz", hash = "sha256:8300431d4ef9d869066ad5dba955a6b9eca4825eb4ffcdb03d9ce34f82509d6a"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.89.0a0-py3-none-any.whl", hash = "sha256:64a84542822bd085b03ac40e39f15c3fee1aaf649a0df34ecf0f288f7bc84c78"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.91.0a0.tar.gz", hash = "sha256:a7b0e78862f3dd81cf13740df2ecda1c877545500872dc476f2dbf3807632a32"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:e3d606055c2fe268d80f96052b583060a25fadcdee79d89a75f2eac4354f2e69"}, ] [package.dependencies] -aws-cdk-lib = "2.89.0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -111,18 +111,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.89.0a0" +version = "2.91.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.89.0a0.tar.gz", hash = "sha256:efa23f021efdca83f037569d41d7e96023c3750417fc976023688397f7f57715"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.89.0a0-py3-none-any.whl", hash = "sha256:7b56ea2889e8a340bfd4feb67f0798827bf58090d368763a59cd0223fe2dd916"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.91.0a0.tar.gz", hash = "sha256:cafd747af66f92755f188172f0e892503bc73c26f0d6d95e5f733c67b0307fa8"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:972393ad1c220708616322946ba3f8936cbe143a69e543762295c1ea02d69849"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.89.0.a0" -aws-cdk-lib = "2.89.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -130,18 +130,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.89.0a0" +version = "2.91.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.89.0a0.tar.gz", hash = "sha256:81469d688a611d9ab10d528923692eba685cbb04a5d3401c02a4530b001a6a77"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.89.0a0-py3-none-any.whl", hash = "sha256:3367cf5fa8e4bb1939fcd60e919af00ecc6d97a1d046938af25b9c5bef26b4c1"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.91.0a0.tar.gz", hash = "sha256:db607df2563f0b839795a41218a59e3ebc29e906dd08aed7b0b59aceba0bde02"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:34d0f103846613a72cfae8419be2e4302863a1e8f6e81951b0a51c2f62ab80b3"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.89.0.a0" -aws-cdk-lib = "2.89.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = 
">=0.0.3" @@ -149,13 +149,13 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.89.0" +version = "2.91.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.89.0.tar.gz", hash = "sha256:8fbd1d4ee0ffeb67bcc845bef5a10575dbc678ad07f74cdb3cb4243afc433db7"}, - {file = "aws_cdk_lib-2.89.0-py3-none-any.whl", hash = "sha256:92eeebd77fe17b36029fae20f46eb601710485ea7c808c3d33fe1c71fee125bd"}, + {file = "aws-cdk-lib-2.91.0.tar.gz", hash = "sha256:1163926527a8b7da931cddea77a4824b929b3f775447c3b7427ecdef7701ce74"}, + {file = "aws_cdk_lib-2.91.0-py3-none-any.whl", hash = "sha256:ec2cadeb5727ea8259ad8a54ac9ff40502032cd2572c81f4594df93365da39da"}, ] [package.dependencies] @@ -183,13 +183,13 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.72.0" +version = "1.73.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.72.0.tar.gz", hash = "sha256:e688aac30943bfe0352147b792d8bbe7c1b5ed648747cd7ef6280875b249e2d8"}, - {file = "aws_sam_translator-1.72.0-py3-none-any.whl", hash = "sha256:69fe3914d61ae6690034c3fc1055743e5415d83c59c35ec5ec9ceb26cc65c8a1"}, + {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, + {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, ] [package.dependencies] @@ -291,17 +291,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.16" +version = "1.28.24" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.16-py3-none-any.whl", hash = "sha256:d8e31f69fb919025a5961f8fbeb51fe92e2f753beb37fc1853138667a231cdaa"}, - {file = "boto3-1.28.16.tar.gz", hash = "sha256:aea48aedf3e8676e598e3202e732295064a4fcad5f2d2d2a699368b8c3ab492c"}, + {file = "boto3-1.28.24-py3-none-any.whl", hash = "sha256:0300ca6ec8bc136eb316b32cc1e30c66b85bc497f5a5fe42e095ae4280569708"}, + {file = "boto3-1.28.24.tar.gz", hash = "sha256:9d1b4713c888e53a218648ad71522bee9bec9d83f2999fff2494675af810b632"}, ] [package.dependencies] -botocore = ">=1.31.16,<1.32.0" +botocore = ">=1.31.24,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -310,13 +310,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.16" +version = "1.31.24" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.16-py3-none-any.whl", hash = "sha256:92b240e2cb7b3afae5361651d2f48ee582f45d2dab53aef76eef7eec1d3ce582"}, - {file = "botocore-1.31.16.tar.gz", hash = "sha256:563e15979e763b93d78de58d0fc065f8615be12f41bab42f5ad9f412b6a224b3"}, + {file = "botocore-1.31.24-py3-none-any.whl", hash = "sha256:8c7ba9b09e9104e2d473214e1ffcf84b77e04cf6f5f2344942c1eed9e299f947"}, + {file = "botocore-1.31.24.tar.gz", hash = "sha256:2d8f412c67f9285219f52d5dbbb6ef0dfa9f606da29cbdd41b6d6474bcc4bbd4"}, ] [package.dependencies] @@ -327,6 +327,31 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "bytecode" +version = "0.13.0" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bytecode-0.13.0-py3-none-any.whl", hash = "sha256:e69f92e7d27f99d5d7d76e6a824bd3d9ff857c72b59927aaf87e1a620f67fe50"}, + {file = "bytecode-0.13.0.tar.gz", hash = "sha256:6af3c2f0a31ce05dce41f7eea5cc380e33f5e8fbb7dcee3b52467a00acd52fcd"}, +] + +[[package]] +name = "bytecode" +version = "0.14.2" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.8" +files = [ + {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"}, + {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + [[package]] name = "cattrs" version = "23.1.2" @@ -598,6 +623,154 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "datadog" +version = "0.46.0" +description = "The Datadog Python library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"}, + {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"}, +] + +[package.dependencies] +requests = ">=2.6.0" + +[[package]] +name = "datadog-lambda" +version = "4.78.0" +description = "The Datadog AWS Lambda Library" +optional = false +python-versions = ">=3.7.0,<4" +files = [ + {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"}, + {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"}, +] + +[package.dependencies] +datadog = ">=0.41.0,<1.0.0" +ddtrace = "1.15.2" +importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} +typing_extensions = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""} +urllib3 = "<2.0.0" +wrapt = ">=1.11.2,<2.0.0" + +[package.extras] +dev = ["boto3 (>=1.10.33,<2.0.0)", "flake8 (>=3.7.9,<4.0.0)", "httpretty (>=0.9.7,<0.10.0)", "nose2 (>=0.9.1,<0.10.0)", "requests (>=2.22.0,<3.0.0)"] + +[[package]] +name = "ddsketch" +version = "2.0.4" +description = "Distributed quantile sketches" +optional = false +python-versions = ">=2.7" +files = [ + {file = "ddsketch-2.0.4-py3-none-any.whl", hash = "sha256:3227a270fd686a29d3a7128f9352ccf852314410380fc11384356f1ae2a75938"}, + {file = "ddsketch-2.0.4.tar.gz", hash = 
"sha256:32f7314077fec8747d4faebaec2c854b5ffc399c5f552f73fa94024f48d74d64"}, +] + +[package.dependencies] +protobuf = {version = ">=3.0.0", markers = "python_version >= \"3.7\""} +six = "*" + +[[package]] +name = "ddtrace" +version = "1.15.2" +description = "Datadog APM client library" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "ddtrace-1.15.2-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:ca0411333fbdb0fafa06d412bbd76ab8d2647cc9dcb8a7833952ce4fe09eb421"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e2603749f97a5191b32f710c8ec5248bb58f4f9a1cb337559f93c5f0f8cea33b"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8392087809e047f701e38ecc4f2990bcfe399a22c516a1dbcbdff50fb7382a79"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2f7649c24a7463be9b86d5f11ac6eaa2014896eaf409e67f3dc813a6bb0ed8b6"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:dbdbc5bf3b2b56b8e61b241ee372d897b295344e269475f38e837c9bfe03ae2c"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:23d39c72ad1844977a80d79206d773c3ec1f1346816b9e45427c25ef88597b4e"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:da458bbbc4de14dd8e8f60aefe42a66c551a9f50c69c6e361acc7edab579a3e4"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d07bb0c50d2df7ff9281bea83534db5127cee8ac2f94111c9544d03d49f60613"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:499b3e3d0359e492792ecf8ab6efcf4b1991fbaa523338774333e9a2a66d9d37"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedd0937f83e0d7b261960365fec5771f39ced599c90f589548a1738a586799d"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7643d20991cd7e1c21e65d8b5c292a9dca8d124f69f9e96cc2b5fb8d47802c3a"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3f6bb76fe33c2e4842236036f78b1bbdd4da0f2b846627ca7d72b01ac49b3076"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ebc9b89501c8a557dab43170e4a12e90358130413a87a0276ccaa0038b0636a4"}, + {file = "ddtrace-1.15.2-cp310-cp310-win32.whl", hash = "sha256:c10ca0e3a63310d314ec7fa55d53f4b4434f06c4d321d64d757814679161bf5d"}, + {file = "ddtrace-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:6208635442ea52ff3f97b9fc64ac25772cda8f105a607a385e55bf524bceefc5"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8524f460be02b402f63b11ad3b1177955c8608f814e1758b87f53f15bf9a7599"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5a2dd127a65e12189055818ab72d44d80587acaaf450c65624e0482d63ff9970"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3350c647120fbc3355eb35ce054c88e63bc073d71949f377d59b1152a2ed0f4"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adb76713207f0ef688f68a539f9cb63e19cd149d48d36befb835f67f49395ed7"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8ff5c250c5abfbbbd76a7d3167308a2373ad7e55ecf3c7c26a62fcd2be8a57"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:264bed998870b63f548b96f57dd771014cd02ef0b21bb382e745900a7b72ef28"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:20bfd8db155167d2ccfddc25b50649338534b12cb00f7ed08514af1eb6a4956e"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72479172bd10f5998188a05b0b4a109ccb2a93467a0aa1e6656d5396c83fb253"}, + {file = "ddtrace-1.15.2-cp311-cp311-win32.whl", hash = "sha256:23bee3d0eb971cc1565caa429620b82f2d69ef648e9c792046b9481188dba9ab"}, + {file = "ddtrace-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:65a29d23ecfbc7cc4ca1069a5586aa836ae3978e64251414933432078bc29bc2"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:3a2852da4a76503211ca8b77a50fc86df36ba15fab04b45a6a17faa386f53839"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:244180c6decb244c7fda929dc5969b3a510e5a4857239063de1fae139fac9837"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:46f9ba0c282a62953f03d1add8eae8c80613244bb93a1ff997dad71d07ce6c72"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win32.whl", hash = "sha256:a39dbf1ca657cc3a876143301e5d775e2f9bcf2ed1e9b4366fb3cf9d6a345a82"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win_amd64.whl", hash = "sha256:7cfd9514e82871321e86897fe567c7548fc45da523df591f3e5adc6633a5781c"}, + {file = "ddtrace-1.15.2-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:3a2978b07d19d4ebf936fde1e455c61b3d88f103f1f9e360b9269fe1a1dc608a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e7795a7f65a6e844ab57a0b31d400e79c4a1f69d174fab8edc69e6d2db56962"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aae5306b3b0ec48cb8ade3362629c31bd25999244addff0f4a2f6f3934509894"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14fb33bd6d9fa70638d43de7b5170e1c9961d3fbc277314609941e108c45716d"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:745ce3c9559fa331ef30208ff1ccaafe3ab3c02f2e01177c560c94acd6f4de27"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7706d35215d2cca0a89581ec11da56e25742914ae0865b928034ee9ad7278cf3"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0bc18e900d1495deb61093f1af38d94af6a1ca66dd541fd47bd093c3f3b80b4a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win32.whl", hash = "sha256:b13f4042ef3f391714aca5ca1f03ff3c24c1d201ab5af02f0405335aa5602ff5"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win_amd64.whl", hash = "sha256:eb32e3b3d0f472447b3d427a075007135b3c39488c1fe0f1e097863f326a439b"}, + {file = "ddtrace-1.15.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:0953fd9a2555801d68674bb4905f64805efe1e02b3f11def21eb7655be046717"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9436ec9cc00b70730d2c1777f11aca7f4863a49ddd27d0b1478e84c1a7667b6f"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7215b21c1eaf56b38bf46c66193db3736ecadeb9ae1b9ca780a91addbaa9853"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a5f7155b99fe9393bfa4f0e4ef2610ddf59e70aefcf99a95acae8b31e29cc4"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:df103a600c2901dc54929ef58dee41887a0bb558efbf7e41a7489bd6264fcf44"}, + {file = 
"ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d51a73238ad8ceff4232ffa94b860d61187b325e7fab746044dafa312d6bc415"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bfc5777204c0c34465fc4ce38d8d1268d9f95ffcbf7e4025e9a5d3e87d3e17c3"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win32.whl", hash = "sha256:9516dbfc974af9632d75e9c32b38e695b88ea18ebfa4580dd0f768bc05272fba"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a510252a3d5be6c29db2c69cbd2535268532e8d568fae06b295a06041e1b969d"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:82995243719c87aefc85d7df0e1ae61bba8ae1f805d48cbaf2132beb215f1968"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ca5dd51910a81451d236fccdbf5d3ca8e284aa3be56f08db92644f85ef88c56e"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d76f485769c035df3ede4ad9830bac06aa8b69ac4617f2eb1251b1094468009"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4dd5f5e477021b8810b2b685e1e16ba5a99f31239e22abc71794688b7f3e6e4d"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ec73676c60cc3cf08430f19a59daccbbb5770edc74ad15a99bf4237a40d0fb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6b140f11b89d902174df05e8b9c1eb1b522a63e6c60c5d68ccac8913bb371bbb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c7c8a0e140d28e49cf8cd96cdec8e17232c5525ed5c154729b8afb6cb93a8e2b"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0112d258c3a681a63e5f057b9e3ee8504b60d773d95baf195462d9ff4096caa9"}, + {file = "ddtrace-1.15.2-cp38-cp38-win32.whl", hash = "sha256:6ea7b80eb8019a70c999ef8cfd34fd6078a2ae154007d124d5e642531bf1a9d6"}, + {file = "ddtrace-1.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:282b8c9b46d7a8450325299cf348a0f1d8f9f34d174a0ea402bc1a1df4ad7cf3"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:53b171404b59c1e030ea614e194d1483fb42437a02ffdd7f4a45175613dd7cb4"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9ba06236dd8bd64776b7b734dd9421709670fef090857448e75c97acb30cdce7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6abe5ba4396c9f7633cab68d0e81c5fd94f7c77b046b3ee969eded068a522d7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61a1b48f97a07e2f422ec01bb23861716300cebe4afd917ab36bb4db68904da4"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86e186dc66802e2d71b94330c1635fd4c3f881a1bb71747be162a57b7602daaa"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:762b5a0777454299c4ac62177578969ed551c973063f87a8825d9d073e5250ce"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:889d359f4382fde41893ba5c00b412cbea8502e1b6bb6c83bf87fa6e63cbfabe"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c4d3c9ce3456181e535b9da42bde7c850dc7224039fd083e95b05010c2ff9748"}, + {file = "ddtrace-1.15.2-cp39-cp39-win32.whl", hash = "sha256:69e47d28327a7afb263c16cc6bf1227e1b2bf1fdb2d559dce913a138a3f36807"}, + {file = "ddtrace-1.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:da780fbfe6dd749ee571a468b8e86f1fd4f51626d35626c2356f8a440efe0dfa"}, + {file = 
"ddtrace-1.15.2.tar.gz", hash = "sha256:e5c1a5965ea8d8260586769102d79522bc7d9758a271252bb58ee05d6c5cd9a8"}, +] + +[package.dependencies] +attrs = {version = ">=20", markers = "python_version > \"2.7\""} +bytecode = [ + {version = ">=0.13.0,<0.14.0", markers = "python_version == \"3.7\""}, + {version = "*", markers = "python_version >= \"3.8\""}, +] +cattrs = {version = "*", markers = "python_version >= \"3.7\""} +ddsketch = ">=2.0.1" +envier = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +opentelemetry-api = {version = ">=1", markers = "python_version >= \"3.7\""} +protobuf = {version = ">=3", markers = "python_version >= \"3.7\""} +six = ">=1.12.0" +typing-extensions = "*" +xmltodict = ">=0.12" + +[package.extras] +opentracing = ["opentracing (>=2.0.0)"] + [[package]] name = "decorator" version = "5.1.1" @@ -609,6 +782,37 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "envier" +version = "0.4.0" +description = "Python application configuration via the environment" +optional = false +python-versions = ">=2.7" +files = [ + {file = "envier-0.4.0-py3-none-any.whl", hash = "sha256:7b91af0f16ea3e56d91ec082f038987e81b441fc19c657a8b8afe0909740a706"}, + {file = "envier-0.4.0.tar.gz", hash = "sha256:e68dcd1ed67d8b6313883e27dff3e701b7fba944d2ed4b7f53d0cc2e12364a82"}, +] + +[package.extras] +mypy = ["mypy"] + [[package]] name = "exceptiongroup" version = "1.1.2" @@ -1005,13 +1209,13 @@ pbr = "*" [[package]] name = "jsii" -version = "1.85.0" +version = "1.86.1" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.85.0-py3-none-any.whl", hash = "sha256:379feb1a1a3c4e449307564f42a7cddef05e43760cbfbbfe8434f6448cd668a0"}, - {file = "jsii-1.85.0.tar.gz", hash = "sha256:b77194cf053c06c6bdffc887a4d1d2a41113c6f4780a7d78d70a780a70998008"}, + {file = "jsii-1.86.1-py3-none-any.whl", hash = "sha256:32eb46ed4c9a35bc92b892ef049ed1996f13be38ffef964d607e8fe930471b3e"}, + {file = "jsii-1.86.1.tar.gz", hash = "sha256:44f9a820eea92c9508693f72d3129b5a080421c949c32303f4f7b2cc98a81f59"}, ] [package.dependencies] @@ -1288,13 +1492,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.5.1" +version = "1.5.2" description = "Project documentation with Markdown." 
optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.5.1-py3-none-any.whl", hash = "sha256:67e889f8d8ba1fe5decdfc59f5f8f21d6a8925a129339e93dede303bdea03a98"}, - {file = "mkdocs-1.5.1.tar.gz", hash = "sha256:f2f323c62fffdf1b71b84849e39aef56d6852b3f0a5571552bca32cefc650209"}, + {file = "mkdocs-1.5.2-py3-none-any.whl", hash = "sha256:60a62538519c2e96fe8426654a67ee177350451616118a41596ae7c876bb7eac"}, + {file = "mkdocs-1.5.2.tar.gz", hash = "sha256:70d0da09c26cff288852471be03c23f0f521fc15cf16ac89c7a3bfb9ae8d24f9"}, ] [package.dependencies] @@ -1613,6 +1817,21 @@ doc = ["nb2plots (>=0.6)", "numpydoc (>=1.1)", "pillow (>=8.2)", "pydata-sphinx- extra = ["lxml (>=4.5)", "pydot (>=1.4.1)", "pygraphviz (>=1.7)"] test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] +[[package]] +name = "opentelemetry-api" +version = "1.19.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, + {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + [[package]] name = "packaging" version = "23.1" @@ -1708,6 +1927,28 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "protobuf" +version = "4.24.0" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, + {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, + {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, + {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, + {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, + {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, + {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, + {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, + {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, + {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, + {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, +] + [[package]] name = "publication" version = "0.0.3" @@ -1784,13 +2025,13 @@ email = ["email-validator (>=1.0.3)"] 
[[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -2069,99 +2310,99 @@ mando = ">=0.6,<0.7" [[package]] name = "regex" -version = "2023.6.3" +version = "2023.8.8" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.6" files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = 
"regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3611576aff55918af2697410ff0293d6071b7e00f4b09e005d614686ac4cd57c"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0ccc8f2698f120e9e5742f4b38dc944c38744d4bdfc427616f3a163dd9de5"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c662a4cbdd6280ee56f841f14620787215a171c4e2d1744c9528bed8f5816c96"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf0633e4a1b667bfe0bb10b5e53fe0d5f34a6243ea2530eb342491f1adf4f739"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551ad543fa19e94943c5b2cebc54c73353ffff08228ee5f3376bd27b3d5b9800"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54de2619f5ea58474f2ac211ceea6b615af2d7e4306220d4f3fe690c91988a61"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ec4b3f0aebbbe2fc0134ee30a791af522a92ad9f164858805a77442d7d18570"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ae646c35cb9f820491760ac62c25b6d6b496757fda2d51be429e0e7b67ae0ab"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca339088839582d01654e6f83a637a4b8194d0960477b9769d2ff2cfa0fa36d2"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d9b6627408021452dcd0d2cdf8da0534e19d93d070bfa8b6b4176f99711e7f90"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:bd3366aceedf274f765a3a4bc95d6cd97b130d1dda524d8f25225d14123c01db"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7aed90a72fc3654fba9bc4b7f851571dcc368120432ad68b226bd593f3f6c0b7"}, + {file = "regex-2023.8.8-cp310-cp310-win32.whl", hash = "sha256:80b80b889cb767cc47f31d2b2f3dec2db8126fbcd0cff31b3925b4dc6609dcdb"}, + {file = "regex-2023.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:b82edc98d107cbc7357da7a5a695901b47d6eb0420e587256ba3ad24b80b7d0b"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1e7d84d64c84ad97bf06f3c8cb5e48941f135ace28f450d86af6b6512f1c9a71"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce0f9fbe7d295f9922c0424a3637b88c6c472b75eafeaff6f910494a1fa719ef"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:06c57e14ac723b04458df5956cfb7e2d9caa6e9d353c0b4c7d5d54fcb1325c46"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7a9aaa5a1267125eef22cef3b63484c3241aaec6f48949b366d26c7250e0357"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b7408511fca48a82a119d78a77c2f5eb1b22fe88b0d2450ed0756d194fe7a9a"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14dc6f2d88192a67d708341f3085df6a4f5a0c7b03dec08d763ca2cd86e9f559"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48c640b99213643d141550326f34f0502fedb1798adb3c9eb79650b1ecb2f177"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0085da0f6c6393428bf0d9c08d8b1874d805bb55e17cb1dfa5ddb7cfb11140bf"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:964b16dcc10c79a4a2be9f1273fcc2684a9eedb3906439720598029a797b46e6"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7ce606c14bb195b0e5108544b540e2c5faed6843367e4ab3deb5c6aa5e681208"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40f029d73b10fac448c73d6eb33d57b34607f40116e9f6e9f0d32e9229b147d7"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3b8e6ea6be6d64104d8e9afc34c151926f8182f84e7ac290a93925c0db004bfd"}, + {file = "regex-2023.8.8-cp311-cp311-win32.whl", hash = "sha256:942f8b1f3b223638b02df7df79140646c03938d488fbfb771824f3d05fc083a8"}, + {file = "regex-2023.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:51d8ea2a3a1a8fe4f67de21b8b93757005213e8ac3917567872f2865185fa7fb"}, + {file = "regex-2023.8.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e951d1a8e9963ea51efd7f150450803e3b95db5939f994ad3d5edac2b6f6e2b4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704f63b774218207b8ccc6c47fcef5340741e5d839d11d606f70af93ee78e4d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22283c769a7b01c8ac355d5be0715bf6929b6267619505e289f792b01304d898"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91129ff1bb0619bc1f4ad19485718cc623a2dc433dff95baadbf89405c7f6b57"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de35342190deb7b866ad6ba5cbcccb2d22c0487ee0cbb251efef0843d705f0d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b993b6f524d1e274a5062488a43e3f9f8764ee9745ccd8e8193df743dbe5ee61"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3026cbcf11d79095a32d9a13bbc572a458727bd5b1ca332df4a79faecd45281c"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:293352710172239bf579c90a9864d0df57340b6fd21272345222fb6371bf82b3"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d909b5a3fff619dc7e48b6b1bedc2f30ec43033ba7af32f936c10839e81b9217"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3d370ff652323c5307d9c8e4c62efd1956fb08051b0e9210212bc51168b4ff56"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_s390x.whl", hash = 
"sha256:b076da1ed19dc37788f6a934c60adf97bd02c7eea461b73730513921a85d4235"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e9941a4ada58f6218694f382e43fdd256e97615db9da135e77359da257a7168b"}, + {file = "regex-2023.8.8-cp36-cp36m-win32.whl", hash = "sha256:a8c65c17aed7e15a0c824cdc63a6b104dfc530f6fa8cb6ac51c437af52b481c7"}, + {file = "regex-2023.8.8-cp36-cp36m-win_amd64.whl", hash = "sha256:aadf28046e77a72f30dcc1ab185639e8de7f4104b8cb5c6dfa5d8ed860e57236"}, + {file = "regex-2023.8.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:423adfa872b4908843ac3e7a30f957f5d5282944b81ca0a3b8a7ccbbfaa06103"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ae594c66f4a7e1ea67232a0846649a7c94c188d6c071ac0210c3e86a5f92109"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e51c80c168074faa793685656c38eb7a06cbad7774c8cbc3ea05552d615393d8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b7f4c66aa9d1522b06e31a54f15581c37286237208df1345108fcf4e050c18"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e73e5243af12d9cd6a9d6a45a43570dbe2e5b1cdfc862f5ae2b031e44dd95a8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941460db8fe3bd613db52f05259c9336f5a47ccae7d7def44cc277184030a116"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f0ccf3e01afeb412a1a9993049cb160d0352dba635bbca7762b2dc722aa5742a"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2e9216e0d2cdce7dbc9be48cb3eacb962740a09b011a116fd7af8c832ab116ca"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5cd9cd7170459b9223c5e592ac036e0704bee765706445c353d96f2890e816c8"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4873ef92e03a4309b3ccd8281454801b291b689f6ad45ef8c3658b6fa761d7ac"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:239c3c2a339d3b3ddd51c2daef10874410917cd2b998f043c13e2084cb191684"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1005c60ed7037be0d9dea1f9c53cc42f836188227366370867222bda4c3c6bd7"}, + {file = "regex-2023.8.8-cp37-cp37m-win32.whl", hash = "sha256:e6bd1e9b95bc5614a7a9c9c44fde9539cba1c823b43a9f7bc11266446dd568e3"}, + {file = "regex-2023.8.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9a96edd79661e93327cfeac4edec72a4046e14550a1d22aa0dd2e3ca52aec921"}, + {file = "regex-2023.8.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2181c20ef18747d5f4a7ea513e09ea03bdd50884a11ce46066bb90fe4213675"}, + {file = "regex-2023.8.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2ad5add903eb7cdde2b7c64aaca405f3957ab34f16594d2b78d53b8b1a6a7d6"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9233ac249b354c54146e392e8a451e465dd2d967fc773690811d3a8c240ac601"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920974009fb37b20d32afcdf0227a2e707eb83fe418713f7a8b7de038b870d0b"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2b6c5dfe0929b6c23dde9624483380b170b6e34ed79054ad131b20203a1a63"}, + {file = 
"regex-2023.8.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96979d753b1dc3b2169003e1854dc67bfc86edf93c01e84757927f810b8c3c93"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ae54a338191e1356253e7883d9d19f8679b6143703086245fb14d1f20196be9"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2162ae2eb8b079622176a81b65d486ba50b888271302190870b8cc488587d280"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c884d1a59e69e03b93cf0dfee8794c63d7de0ee8f7ffb76e5f75be8131b6400a"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf9273e96f3ee2ac89ffcb17627a78f78e7516b08f94dc435844ae72576a276e"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:83215147121e15d5f3a45d99abeed9cf1fe16869d5c233b08c56cdf75f43a504"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f7454aa427b8ab9101f3787eb178057c5250478e39b99540cfc2b889c7d0586"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0640913d2c1044d97e30d7c41728195fc37e54d190c5385eacb52115127b882"}, + {file = "regex-2023.8.8-cp38-cp38-win32.whl", hash = "sha256:0c59122ceccb905a941fb23b087b8eafc5290bf983ebcb14d2301febcbe199c7"}, + {file = "regex-2023.8.8-cp38-cp38-win_amd64.whl", hash = "sha256:c12f6f67495ea05c3d542d119d270007090bad5b843f642d418eb601ec0fa7be"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:82cd0a69cd28f6cc3789cc6adeb1027f79526b1ab50b1f6062bbc3a0ccb2dbc3"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bb34d1605f96a245fc39790a117ac1bac8de84ab7691637b26ab2c5efb8f228c"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b9ac04d0b38ef4f89fbc035e84a7efad9cdd5f1e29024f9289182c8d99e09"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dd6082f4e2aec9b6a0927202c85bc1b09dcab113f97265127c1dc20e2e32495"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb95fe8222932c10d4436e7a6f7c99991e3fdd9f36c949eff16a69246dee2dc"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7098c524ba9f20717a56a8d551d2ed491ea89cbf37e540759ed3b776a4f8d6eb"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b694430b3f00eb02c594ff5a16db30e054c1b9589a043fe9174584c6efa8033"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2aeab3895d778155054abea5238d0eb9a72e9242bd4b43f42fd911ef9a13470"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:988631b9d78b546e284478c2ec15c8a85960e262e247b35ca5eaf7ee22f6050a"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:67ecd894e56a0c6108ec5ab1d8fa8418ec0cff45844a855966b875d1039a2e34"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:14898830f0a0eb67cae2bbbc787c1a7d6e34ecc06fbd39d3af5fe29a4468e2c9"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f2200e00b62568cfd920127782c61bc1c546062a879cdc741cfcc6976668dfcf"}, + {file = 
"regex-2023.8.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9691a549c19c22d26a4f3b948071e93517bdf86e41b81d8c6ac8a964bb71e5a6"}, + {file = "regex-2023.8.8-cp39-cp39-win32.whl", hash = "sha256:6ab2ed84bf0137927846b37e882745a827458689eb969028af8032b1b3dac78e"}, + {file = "regex-2023.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5543c055d8ec7801901e1193a51570643d6a6ab8751b1f7dd9af71af467538bb"}, + {file = "regex-2023.8.8.tar.gz", hash = "sha256:fcbdc5f2b0f1cd0f6a56cdb46fe41d2cce1e644e3b68832f3eeebc5fb0f7712e"}, ] [[package]] @@ -2575,7 +2816,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -optional = true +optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, @@ -2671,6 +2912,17 @@ PyYAML = ">=4.2b1,<7.0" radon = ">=4,<6" requests = ">=2.0,<3.0" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + [[package]] name = "zipp" version = "3.15.0" @@ -2689,6 +2941,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [extras] all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] aws-sdk = ["boto3"] +datadog = [] parser = ["pydantic"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] @@ -2696,4 +2949,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "bda535adaf225acfc4e90829893a0f7677761596824fd3c8e6bea4ff24fad07a" +content-hash = "d99019198a55e402af3251eb826136b743dcced7df7faa84c1b920f009dd4dbf" diff --git a/pyproject.toml b/pyproject.toml index 51cab19741c..04998e0da56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,7 @@ ijson = "^3.2.2" typed-ast = { version = "^1.5.5", python = "< 3.8"} hvac = "^1.1.1" aws-requests-auth = "^0.4.3" +datadog-lambda = "^4.77.0" [tool.poetry.extras] parser = ["pydantic"] @@ -84,6 +85,7 @@ tracer = ["aws-xray-sdk"] all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] +datadog=["datadog-lambda"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.79.6" diff --git a/tests/functional/metrics/conftest.py b/tests/functional/metrics/conftest.py index cb0e083ca1f..2de3a0087c2 100644 --- a/tests/functional/metrics/conftest.py +++ b/tests/functional/metrics/conftest.py @@ -29,6 +29,11 @@ def metric() -> Dict[str, str]: return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} +@pytest.fixture +def metric_datadog() -> Dict[str, str]: + return {"name": "single_metric", "value": 1, "timestamp": 1691678198, "powertools": "datadog"} + + @pytest.fixture def metrics() -> List[Dict[str, str]]: return [ diff --git a/tests/functional/metrics/test_metrics_datadog.py b/tests/functional/metrics/test_metrics_datadog.py new file mode 100644 index 00000000000..c81c825f656 --- /dev/null +++ b/tests/functional/metrics/test_metrics_datadog.py @@ -0,0 +1,281 @@ +import json +import warnings +from collections import namedtuple + +import pytest +from test_metrics_provider import capture_metrics_output + +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.provider.cold_start import reset_cold_start_flag +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics, DatadogProvider + + +def test_datadog_coldstart(capsys): + reset_cold_start_flag() + + # GIVEN DatadogMetrics is initialized + dd_provider = DatadogProvider(flush_to_log=True) + metrics = DatadogMetrics(provider=dd_provider) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used with capture_cold_start_metric + @metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN ColdStart metric and function_name and service dimension should be logged + assert "ColdStart" in logs + assert "example_fn2" in logs + + +def test_datadog_write_to_log_with_env_variable(capsys, monkeypatch): + # GIVEN DD_FLUSH_TO_LOG env is configured + monkeypatch.setenv("DD_FLUSH_TO_LOG", "True") + metrics = DatadogMetrics() + + # WHEN we add a metric + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + metrics.flush_metrics() + logs = capture_metrics_output(capsys) + + # THEN metrics is flushed to log + logs["e"] = "" + assert logs == json.loads('{"m":"item_sold","v":1,"e":"","t":["product:latte","order:online"]}') + + +def test_datadog_with_invalid_metric_value(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we pass an incorrect metric value (non-numeric) + # WHEN we attempt to serialize a valid Datadog metric + # THEN it should fail validation and raise MetricValueError + with pytest.raises(MetricValueError, match=".*is not a valid number"): + metrics.add_metric(name="item_sold", value="a", product="latte", order="online") + + +def test_datadog_with_invalid_metric_name(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we a metric name starting with a number + # WHEN we attempt to serialize a valid Datadog metric + # THEN it should fail validation and raise MetricValueError + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name="1_item_sold", value="a", product="latte", order="online") + + +def test_datadog_raise_on_empty(): + # GIVEN 
DatadogMetrics is initialized + metrics = DatadogMetrics() + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we set raise_on_empty_metrics to True + @metrics.log_metrics(raise_on_empty_metrics=True) + def lambda_handler(event, context): + pass + + # THEN it should fail because no metric was serialized + with pytest.raises(SchemaValidationError, match="Must contain at least one metric."): + lambda_handler({}, LambdaContext("example_fn")) + + +def test_datadog_tags_using_kwargs(capsys): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics(flush_to_log=True) + + # WHEN we add tags using kwargs + metrics.add_metric("order_valve", 12.45, sales="sam") + metrics.flush_metrics() + logs = capsys.readouterr().out.strip() + log_dict = json.loads(logs) + tag_list = log_dict.get("t") + + # THEN tags must be present + assert "sales:sam" in tag_list + + +def test_metrics_clear_metrics_after_invocation(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN log_metrics is used to flush metrics from memory + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + lambda_handler({}, {}) + + # THEN metric set should be empty after the function has been run + assert my_metrics.metric_set == [] + + +def test_metrics_decorator_with_metrics_warning(): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN using the log_metrics decorator and no metrics have been added + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + # THEN it should raise a warning instead of throwing an exception + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("default") + lambda_handler({}, {}) + assert len(w) == 1 + assert str(w[-1].message) == ( + "No application metrics to publish. The cold-start metric may be published if enabled. 
" + "If application metrics should never be empty, consider using 'raise_on_empty_metrics'" + ) + + +def test_metrics_with_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with default namespace + metrics = DatadogMetrics(flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we add metrics + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN default namespace must be assumed + assert namespace not in logs + + +def test_datadog_with_non_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with a non-default namespace + metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn")) + logs = capsys.readouterr().out.strip() + + # THEN namespace must be present in logs + assert namespace in logs + + +def test_serialize_metrics(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN we serialize metrics + my_metrics.serialize_metric_set() + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set[0]["m"] == "single_metric" + + +def test_clear_metrics(metric): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric) + my_metrics.clear_metrics() + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set == [] + + +def test_persist_default_tags(capsys): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN we utilize log_metrics to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a function execution + @my_metrics.log_metrics + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" in first_invocation + assert "environment" in second_invocation + + +def test_log_metrics_with_default_tags(capsys): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + default_tags = {"environment": "test", "log_group": "/lambda/test"} + + # WHEN we utilize log_metrics with default dimensions to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a function execution + @my_metrics.log_metrics(default_tags=default_tags) + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" 
+def test_clear_default_tags(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN they are removed via the clear_default_tags method + my_metrics.clear_default_tags() + + # THEN there should be no default tags + assert not my_metrics.default_tags + + +def test_namespace_var_precedence(monkeypatch, namespace): + # GIVEN POWERTOOLS_METRICS_NAMESPACE is set and an explicit namespace is passed + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", "a_namespace") + my_metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + # WHEN creating a metric + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN the namespace should match the explicitly passed argument and not the env var + assert output[0]["m"] == f"{namespace}.item_sold" + + +def test_namespace_env_var(monkeypatch): + # GIVEN POWERTOOLS_METRICS_NAMESPACE is set and no explicit namespace is passed + env_namespace = "a_namespace" + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", env_namespace) + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN creating a metric + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN the namespace should fall back to the env var + assert output[0]["m"] == f"{env_namespace}.item_sold"
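These two namespace tests establish a precedence order: an explicit namespace argument beats POWERTOOLS_METRICS_NAMESPACE, which in turn beats the built-in default, and the resolved namespace is prepended to the metric name ("<namespace>.item_sold"). A minimal sketch of that resolution, where resolve_namespace is a hypothetical stand-in for the provider's internals:

import os
from typing import Optional


def resolve_namespace(explicit: Optional[str] = None) -> str:
    # Explicit argument > POWERTOOLS_METRICS_NAMESPACE env var > built-in default
    return explicit or os.environ.get("POWERTOOLS_METRICS_NAMESPACE") or "default"


os.environ["POWERTOOLS_METRICS_NAMESPACE"] = "a_namespace"
assert resolve_namespace("test_namespace") == "test_namespace"  # explicit argument wins
assert resolve_namespace() == "a_namespace"                     # env var fallback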
unit = "Fake" + + # WHEN try to extract this value + # THEN must fail with MetricUnitError + with pytest.raises(MetricUnitError, match="Invalid metric unit.*"): + extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + +def test_extract_valid_cloudwatch_metric_unit_value(): + metric_units = [unit.value for unit in MetricUnit] + metric_unit_valid_options = list(MetricUnit.__members__) + + # GIVEN an invalid EMF unit value + unit = "Count" + + # WHEN try to extract this value + extracted_unit_value = extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + # THEN value must be extracted + assert extracted_unit_value == unit diff --git a/tests/unit/metrics/test_unit_datadog.py b/tests/unit/metrics/test_unit_datadog.py new file mode 100644 index 00000000000..ab54e9730fe --- /dev/null +++ b/tests/unit/metrics/test_unit_datadog.py @@ -0,0 +1,69 @@ +import pytest + +from aws_lambda_powertools.metrics.exceptions import SchemaValidationError +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning + + +def test_get_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(namespace=namespace) + + # WHEN we try to access the namespace property + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_set_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics() + + # WHEN we set the namespace property after ther initialization + my_metrics.namespace = namespace + + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_default_tags_across_instances(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN a new DatadogMetrics instance is created + same_metrics = DatadogMetrics() + + # THEN default tags should also be present in the new instance + assert "environment" in same_metrics.default_tags + + +def test_invalid_datadog_metric_name(): + metrics = DatadogMetrics() + + # GIVEN three metrics names with different invalid names + metric_name_1 = "1_metric" # Metric name must not start with number + metric_name_2 = "metric_รง" # Metric name must not contains unicode characters + metric_name_3 = "".join(["x" for _ in range(201)]) # Metric name must have less than 200 characters + + # WHEN we try to validate those metrics names + # THEN must be False + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_1, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_2, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_3, value=1) + + +def test_invalid_datadog_metric_tag(): + metrics = DatadogMetrics() + + # GIVEN three metrics with different invalid tags + metric_tag_1 = "".join(["x" for _ in range(201)]) # Metric tags must have less than 200 characters + + # WHEN we try to validate those metrics tags + # THEN must be False + with pytest.warns(DatadogDataValidationWarning): + metrics.add_metric(name="metric_2", value=1, tag1=metric_tag_1)