diff --git a/python/.flake8 b/python/.flake8 index 24489b25a7d..e55ad0bdf36 100644 --- a/python/.flake8 +++ b/python/.flake8 @@ -1,6 +1,6 @@ [flake8] exclude = docs, .eggs, setup.py, example, .aws-sam -ignore = E203, E266, W503, BLK100, W291 +ignore = E203, E266, W503, BLK100, W291, I004 max-line-length = 120 max-complexity = 18 diff --git a/python/.gitignore b/python/.gitignore index 19e878767f0..fcab8fae4e0 100644 --- a/python/.gitignore +++ b/python/.gitignore @@ -293,4 +293,6 @@ $RECYCLE.BIN/ # Misc -test_report \ No newline at end of file +test_report +wheelhouse +docs \ No newline at end of file diff --git a/python/HISTORY.md b/python/HISTORY.md index cf7e8dcd5e2..41120a69554 100644 --- a/python/HISTORY.md +++ b/python/HISTORY.md @@ -1,5 +1,18 @@ # HISTORY +## April 9th, 2020 + +**0.6.3** + +* Fix `log_metrics` decorator logic not calling the decorated function, and exception handling + +## April 8th, 2020 + +**0.6.1** + +* Introduces Metrics middleware to utilise CloudWatch Embedded Metric Format +* Adds deprecation warning for `log_metrics` + ## February 20th, 2020 **0.5.0** diff --git a/python/Makefile b/python/Makefile index 30dd772d618..a6ce4aefeb9 100644 --- a/python/Makefile +++ b/python/Makefile @@ -15,7 +15,7 @@ lint: format poetry run flake8 test: - poetry run pytest + poetry run pytest -vvv test-html: poetry run pytest --cov-report html @@ -25,18 +25,30 @@ pr: lint test build: pr poetry run build +docs: dev + poetry run pdoc --html --output-dir docs ./aws_lambda_powertools --force + +docs-dev: + poetry run pdoc --http : aws_lambda_powertools + # # Use `poetry version /` for version bump # -release: +release-prod: poetry config pypi-token.pypi ${PYPI_TOKEN} - @$(MAKE) build - poetry publish - rm -rf dist + poetry publish -n release-test: poetry config repositories.testpypi https://test.pypi.org/legacy poetry config pypi-token.pypi ${PYPI_TEST_TOKEN} - @$(MAKE) build - poetry publish --repository testpypi - rm -rf dist + poetry publish --repository 
testpypi -n + +build-linux-wheels: + poetry build + docker run --env PLAT=manylinux1_x86_64 --rm -it -v ${PWD}:/io -w /io quay.io/pypa/manylinux1_x86_64 /io/build_linux_wheels.sh + cp ./wheelhouse/* dist/ && rm -rf wheelhouse + +release: + $(MAKE) build-linux-wheels + $(MAKE) release-test + $(MAKE) release-prod diff --git a/python/README.md b/python/README.md index d3d2ce90b08..f369e359939 100644 --- a/python/README.md +++ b/python/README.md @@ -25,6 +25,13 @@ A suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, * Log sampling enables DEBUG log level for a percentage of requests (disabled by default) - Enable via `POWERTOOLS_LOGGER_SAMPLE_RATE=0.1`, ranges from 0 to 1, where 0.1 is 10% and 1 is 100% +**Metrics** + +* Aggregate up to 100 metrics using a single CloudWatch Embedded Metric Format object (large JSON blob) +* Context manager to create an one off metric with a different dimension than metrics already aggregated +* Validate against common metric definitions mistakes (metric unit, values, max dimensions, max metrics, etc) +* No stack, custom resource, data collection needed — Metrics are created async by CloudWatch EMF + **Environment variables** used across suite of utilities Environment variable | Description | Default | Utility @@ -33,6 +40,7 @@ POWERTOOLS_SERVICE_NAME | Sets service name used for tracing namespace, metrics POWERTOOLS_TRACE_DISABLED | Disables tracing | "false" | tracing POWERTOOLS_LOGGER_LOG_EVENT | Logs incoming event | "false" | logging POWERTOOLS_LOGGER_SAMPLE_RATE | Debug log sampling | 0 | logging +POWERTOOLS_METRICS_NAMESPACE | Metrics namespace | None | metrics LOG_LEVEL | Sets logging level | "INFO" | logging ## Usage @@ -148,41 +156,60 @@ def handler(event, context) #### Custom Metrics async -> **NOTE**: This will **likely change after Beta** in light of [new Amazon CloudWatch embedded metric 
format](https://aws.amazon.com/about-aws/whats-new/2019/11/amazon-cloudwatch-launches-embedded-metric-format/), meaning we won't need an additional stack and interface could change. +> **NOTE** `log_metric` will be removed once it's GA. + +This feature makes use of CloudWatch Embedded Metric Format (EMF) and metrics are created asynchronously by CloudWatch service + +> Contrary to `log_metric`, you don't need any custom resource or additional CloudFormation stack anymore. + +Metrics middleware validates against the minimum necessary for a metric to be published: -This feature requires [Custom Metrics SAR App](https://serverlessrepo.aws.amazon.com/applications/arn:aws:serverlessrepo:us-east-1:374852340823:applications~async-custom-metrics) in order to process canonical metric lines in CloudWatch Logs. +* At least of one Metric and Dimension +* Maximum of 9 dimensions +* Only one Namespace +* [Any Metric unit supported by CloudWatch](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html) -If you're starting from scratch, you may want to see a working example, tune to your needs and deploy within your account - [Serverless Airline Log Processing Stack](https://github.com/aws-samples/aws-serverless-airline-booking/blob/develop/src/backend/log-processing/template.yaml) +**Creating multiple metrics** + +`log_metrics` decorator calls the decorated function, so leave that for last decorator or will fail with `SchemaValidationError` if no metrics are recorded. 
```python -from aws_lambda_powertools.logging import MetricUnit, log_metric +from aws_lambda_powertools.metrics import Metrics, MetricUnit -def handler(event, context) - log_metric(name="SuccessfulPayment", unit=MetricUnit.Count, value=10, namespace="MyApplication") - - # Optional dimensions - log_metric(name="SuccessfulPayment", unit=MetricUnit.Count, value=10, namespace="MyApplication", customer_id="123-abc", charge_id="abc-123") - - # Explicit service name - log_metric(service="paymentTest", name="SuccessfulPayment", namespace="MyApplication".....) - ... +metrics = Metrics() +metrics.add_namespace(name="ServerlessAirline") +metrics.add_metric(name="ColdStart", unit="Count", value=1) +metrics.add_dimension(name="service", value="booking") + +@metrics.log_metrics +@tracer.capture_lambda_handler +def lambda_handler(evt, ctx): + metrics.add_metric(name="BookingConfirmation", unit="Count", value=1) + some_code() + return True + +def some_code(): + metrics.add_metric(name="some_other_metric", unit=MetricUnit.Seconds, value=1) + ... ``` -**Exerpt output in CloudWatch Logs** +CloudWatch EMF uses the same dimensions across all metrics. If you have metrics that should have different dimensions, use `single_metric` to create a single metric with any dimension you want. 
Generally, this would be an edge case since you [pay for unique metric](https://aws.amazon.com/cloudwatch/pricing/) -``` -MONITORING|10|Count|SuccessfulPayment|MyApplication|service="payment -MONITORING|10|Count|SuccessfulPayment|MyApplication|customer_id="123-abc",charge_id="abc-123",service="payment -MONITORING|10|Count|SuccessfulPayment|MyApplication|service="paymentTest -``` +> unique metric = (metric_name + dimension_name + dimension_value) +```python +from aws_lambda_powertools.metrics import MetricUnit, single_metric + +with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric: + metric.add_dimension(name="function_context", value="$LATEST") +``` ## Beta +> **[Progress towards GA](https://github.com/awslabs/aws-lambda-powertools/projects/1)** + This library may change its API/methods or environment variables as it receives feedback from customers. Currently looking for ideas in the following areas before making it stable: * **Should Tracer patch all possible imported libraries by default or only AWS SDKs?** - Patching all libraries may have a small performance penalty (~50ms) at cold start - Alternatively, we could patch only AWS SDK if available and to provide a param to patch multiple `Tracer(modules=("boto3", "requests"))` -* **Create a Tracer provider to support additional tracing** - - Either duck typing or ABC to allow additional tracing providers diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 7cb84aa0542..de344f1f569 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -3,6 +3,7 @@ import logging import os import random +import warnings from distutils.util import strtobool from typing import Any, Callable, Dict @@ -237,6 +238,7 @@ def log_metric( keyword arguments as additional dimensions (e.g. 
customer=customerId) """ + warnings.warn(message="This method will be removed in GA; use Metrics instead", category=DeprecationWarning) logger.debug(f"Building new custom metric. Name: {name}, Unit: {unit}, Value: {value}, Dimensions: {dimensions}") service = os.getenv("POWERTOOLS_SERVICE_NAME") or service dimensions = __build_dimensions(**dimensions) diff --git a/python/aws_lambda_powertools/metrics/__init__.py b/python/aws_lambda_powertools/metrics/__init__.py new file mode 100644 index 00000000000..d3ef0bb9817 --- /dev/null +++ b/python/aws_lambda_powertools/metrics/__init__.py @@ -0,0 +1,17 @@ +"""CloudWatch Embedded Metric Format utility +""" +from aws_lambda_powertools.helper.models import MetricUnit + +from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError, UniqueNamespaceError +from .metric import single_metric +from .metrics import Metrics + +__all__ = [ + "Metrics", + "single_metric", + "MetricUnit", + "MetricUnitError", + "SchemaValidationError", + "MetricValueError", + "UniqueNamespaceError", +] diff --git a/python/aws_lambda_powertools/metrics/base.py b/python/aws_lambda_powertools/metrics/base.py new file mode 100644 index 00000000000..3c45bc619f8 --- /dev/null +++ b/python/aws_lambda_powertools/metrics/base.py @@ -0,0 +1,206 @@ +import datetime +import json +import logging +import numbers +import os +import pathlib +from typing import Dict, List, Union + +import fastjsonschema + +from aws_lambda_powertools.helper.models import MetricUnit + +from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError, UniqueNamespaceError + +logger = logging.getLogger(__name__) +logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) + +_schema_path = pathlib.Path(__file__).parent / "./schema.json" +with _schema_path.open() as f: + CLOUDWATCH_EMF_SCHEMA = json.load(f) + + +class MetricManager: + """Base class for metric functionality (namespace, metric, dimension, serialization) + + MetricManager creates metrics asynchronously 
thanks to CloudWatch Embedded Metric Format (EMF). + CloudWatch EMF can create up to 100 metrics per EMF object + and metrics, dimensions, and namespace created via MetricManager + will adhere to the schema, will be serialized and validated against EMF Schema. + + **Use `aws_lambda_powertools.metrics.metrics.Metrics` or + `aws_lambda_powertools.metrics.metric.single_metric` to create EMF metrics.** + + Environment variables + --------------------- + POWERTOOLS_METRICS_NAMESPACE : str + metric namespace to be set for all metrics + + Raises + ------ + MetricUnitError + When metric metric isn't supported by CloudWatch + MetricValueError + When metric value isn't a number + UniqueNamespaceError + When an additional namespace is set + SchemaValidationError + When metric object fails EMF schema validation + """ + + def __init__(self, metric_set: Dict[str, str] = None, dimension_set: Dict = None, namespace: str = None): + self.metric_set = metric_set or {} + self.dimension_set = dimension_set or {} + self.namespace = os.getenv("POWERTOOLS_METRICS_NAMESPACE") or namespace + + def add_namespace(self, name: str): + """Adds given metric namespace + + Example + ------- + **Add metric namespace** + + metric.add_namespace(name="ServerlessAirline") + + Parameters + ---------- + name : str + Metric namespace + """ + if self.namespace is not None: + raise UniqueNamespaceError( + f"Namespace '{self.namespace}' already set - Only one namespace is allowed across metrics" + ) + logger.debug(f"Adding metrics namespace: {name}") + self.namespace = name + + def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): + """Adds given metric + + Example + ------- + **Add given metric using MetricUnit enum** + + metric.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) + + **Add given metric using plain string as value unit** + + metric.add_metric(name="BookingConfirmation", unit="Count", value=1) + + Parameters + ---------- + name : str + Metric name + 
unit : MetricUnit + `aws_lambda_powertools.helper.models.MetricUnit` + value : float + Metric value + + Raises + ------ + MetricUnitError + When metric unit is not supported by CloudWatch + """ + if len(self.metric_set) == 100: + logger.debug("Exceeded maximum of 100 metrics - Publishing existing metric set") + metrics = self.serialize_metric_set() + print(json.dumps(metrics)) + self.metric_set = {} + + if not isinstance(value, numbers.Number): + raise MetricValueError(f"{value} is not a valid number") + + if not isinstance(unit, MetricUnit): + try: + unit = MetricUnit[unit] + except KeyError: + unit_options = list(MetricUnit.__members__) + raise MetricUnitError(f"Invalid metric unit '{unit}', expected either option: {unit_options}") + + metric = {"Unit": unit.value, "Value": float(value)} + logger.debug(f"Adding metric: {name} with {metric}") + self.metric_set[name] = metric + + def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None) -> Dict: + """Serializes metric and dimensions set + + Parameters + ---------- + metrics : Dict, optional + Dictionary of metrics to serialize, by default None + dimensions : Dict, optional + Dictionary of dimensions to serialize, by default None + + Example + ------- + **Serialize metrics into EMF format** + + metrics = MetricManager() + # ...add metrics, dimensions, namespace + ret = metrics.serialize_metric_set() + + Returns + ------- + Dict + Serialized metrics following EMF specification + + Raises + ------ + SchemaValidationError + Raised when serialization fail schema validation + """ + if metrics is None: + metrics = self.metric_set + + if dimensions is None: + dimensions = self.dimension_set + + logger.debug("Serializing...", {"metrics": metrics, "dimensions": dimensions}) + + dimension_keys: List[str] = list(dimensions.keys()) + metric_names_unit: List[Dict[str, str]] = [] + metric_set: Dict[str, str] = {} + + for metric_name in metrics: + metric: str = metrics[metric_name] + metric_value: int = 
metric.get("Value", 0) + metric_unit: str = metric.get("Unit") + + if metric_value > 0 and metric_unit is not None: + metric_names_unit.append({"Name": metric_name, "Unit": metric["Unit"]}) + metric_set.update({metric_name: metric["Value"]}) + + metrics_definition = { + "CloudWatchMetrics": [ + {"Namespace": self.namespace, "Dimensions": [dimension_keys], "Metrics": metric_names_unit} + ] + } + metrics_timestamp = {"Timestamp": int(datetime.datetime.now().timestamp() * 1000)} + metric_set["_aws"] = {**metrics_timestamp, **metrics_definition} + + try: + logger.debug("Validating serialized metrics against CloudWatch EMF schema", metric_set) + fastjsonschema.validate(definition=CLOUDWATCH_EMF_SCHEMA, data=metric_set) + except fastjsonschema.JsonSchemaException as e: + message = f"Invalid format. Error: {e.message}, Invalid item: {e.name}" # noqa: B306, E501 + raise SchemaValidationError(message) + return metric_set + + def add_dimension(self, name: str, value: str): + """Adds given dimension to all metrics + + Example + ------- + **Add a metric dimensions** + + metric.add_dimension(name="operation", value="confirm_booking") + + Parameters + ---------- + name : str + Dimension name + value : str + Dimension value + """ + logger.debug(f"Adding dimension: {name}:{value}") + self.dimension_set[name] = value diff --git a/python/aws_lambda_powertools/metrics/exceptions.py b/python/aws_lambda_powertools/metrics/exceptions.py new file mode 100644 index 00000000000..b9b1107e747 --- /dev/null +++ b/python/aws_lambda_powertools/metrics/exceptions.py @@ -0,0 +1,14 @@ +class MetricUnitError(Exception): + pass + + +class SchemaValidationError(Exception): + pass + + +class MetricValueError(Exception): + pass + + +class UniqueNamespaceError(Exception): + pass diff --git a/python/aws_lambda_powertools/metrics/metric.py b/python/aws_lambda_powertools/metrics/metric.py new file mode 100644 index 00000000000..85ce527e576 --- /dev/null +++ b/python/aws_lambda_powertools/metrics/metric.py 
logger = logging.getLogger(__name__)
logger.setLevel(os.getenv("LOG_LEVEL", "INFO"))


class SingleMetric(MetricManager):
    """SingleMetric creates an EMF object with a single metric.

    EMF specification doesn't allow metrics with different dimensions.
    SingleMetric overrides MetricManager's add_metric method to do just that.

    Use `single_metric` when you need to create metrics with different dimensions,
    otherwise `aws_lambda_powertools.metrics.metrics.Metrics` is
    a more cost effective option.

    Environment variables
    ---------------------
    POWERTOOLS_METRICS_NAMESPACE : str
        metric namespace

    Example
    -------
    **Creates cold start metric with function_version as dimension**

        from aws_lambda_powertools.metrics import SingleMetric, MetricUnit
        import json
        metric = SingleMetric()

        metric.add_namespace(name="ServerlessAirline")
        metric.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1)
        metric.add_dimension(name="function_version", value=47)

        print(json.dumps(metric.serialize_metric_set(), indent=4))

    Parameters
    ----------
    MetricManager : MetricManager
        Inherits from `aws_lambda_powertools.metrics.base.MetricManager`
    """

    def add_metric(self, name: str, unit: MetricUnit, value: float):
        """Add a metric, ignoring any metric beyond the first one added.

        Parameters
        ----------
        name : str
            Metric name (e.g. BookingConfirmation)
        unit : MetricUnit
            Metric unit (e.g. "Seconds", MetricUnit.Seconds)
        value : float
            Metric value
        """
        if len(self.metric_set) > 0:
            # Guarantee the "single" in SingleMetric: later calls are no-ops
            logger.debug("Metric %s already set, skipping...", name)
            return
        return super().add_metric(name, unit, value)


@contextmanager
def single_metric(name: str, unit: MetricUnit, value: float):
    """Context manager to simplify creation of a single metric

    Example
    -------
    **Creates cold start metric with function_version as dimension**

        from aws_lambda_powertools.metrics import single_metric, MetricUnit

        with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric:
            metric.add_namespace(name="ServerlessAirline")
            metric.add_dimension(name="function_version", value=47)

    **Same as above but set namespace using environment variable**

        $ export POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline"

        from aws_lambda_powertools.metrics import single_metric, MetricUnit

        with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric:
            metric.add_dimension(name="function_version", value=47)

    Parameters
    ----------
    name : str
        Metric name
    unit : MetricUnit
        `aws_lambda_powertools.helper.models.MetricUnit`
    value : float
        Metric value

    Yields
    -------
    SingleMetric
        SingleMetric class instance

    Raises
    ------
    MetricUnitError
        When metric unit is not supported by CloudWatch
    SchemaValidationError
        When serialization fails schema validation; any exception raised
        inside the `with` block is propagated unchanged
    """
    metric: SingleMetric = SingleMetric()
    metric.add_metric(name=name, unit=unit, value=value)

    metric_set: Dict = None
    try:
        yield metric
        logger.debug("Serializing single metric")
        metric_set = metric.serialize_metric_set()
    finally:
        # Publish only when serialization succeeded; previously a failure
        # inside the `with` block still printed "null" (json.dumps(None))
        # to stdout as a bogus EMF record
        if metric_set is not None:
            logger.debug("Publishing single metric: %s", metric_set)
            print(json.dumps(metric_set))
logger = logging.getLogger(__name__)
logger.setLevel(os.getenv("LOG_LEVEL", "INFO"))


class Metrics(MetricManager):
    """Metrics creates an EMF object with up to 100 metrics

    Use Metrics when you need to create multiple metrics that have
    dimensions in common (e.g. service_name="payment").

    Metrics stores up to 100 metrics in memory, and the metric/dimension
    sets are shared across all its instances. That means it can be safely
    instantiated outside of a Lambda function, or anywhere else.

    A decorator (log_metrics) is provided so metrics are published at the end
    of its execution. If more than 100 metrics are added at a given function
    execution, these metrics are serialized and published before adding a
    given metric to prevent metric truncation.

    Example
    -------
    **Creates a few metrics and publish at the end of a function execution**

        from aws_lambda_powertools.metrics import Metrics

        metrics = Metrics()
        metrics.add_namespace(name="ServerlessAirline")
        metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1)
        metrics.add_metric(name="BookingConfirmation", unit="Count", value=1)
        metrics.add_dimension(name="service", value="booking")
        metrics.add_dimension(name="function_version", value="$LATEST")
        ...

        @tracer.capture_lambda_handler
        @metrics.log_metrics
        def lambda_handler():
            do_something()
            return True

        def do_something():
            metrics.add_metric(name="Something", unit="Count", value=1)

    Environment variables
    ---------------------
    POWERTOOLS_METRICS_NAMESPACE : str
        metric namespace

    Parameters
    ----------
    MetricManager : MetricManager
        Inherits from `aws_lambda_powertools.metrics.base.MetricManager`
    """

    # Class-level storage so every Metrics instance aggregates into the same
    # EMF object (shared state by design)
    _metrics = {}
    _dimensions = {}

    def __init__(self, metric_set=None, dimension_set=None, namespace=None):
        # NOTE(review): metric_set/dimension_set parameters are accepted for
        # signature compatibility but ignored in favour of the shared state
        super().__init__(metric_set=self._metrics, dimension_set=self._dimensions, namespace=namespace)

    def log_metrics(self, lambda_handler: Callable[[Any, Any], Any] = None):
        """Decorator to serialize and publish metrics at the end of a function execution.

        Be aware that log_metrics **does call** the decorated function
        (e.g. lambda_handler). Supports both `@metrics.log_metrics` and
        `@metrics.log_metrics()` usage.

        Example
        -------
        **Lambda function using tracer and metrics decorators**

            metrics = Metrics()
            tracer = Tracer(service="payment")

            @tracer.capture_lambda_handler
            @metrics.log_metrics
            def handler(event, context)

        Parameters
        ----------
        lambda_handler : Callable[[Any, Any], Any], optional
            Lambda function handler, by default None

        Raises
        ------
        SchemaValidationError
            When serialization fails schema validation; exceptions raised by
            the decorated handler are propagated unchanged
        """
        if lambda_handler is None:
            # Called with parentheses (@metrics.log_metrics()): return the
            # decorator itself so both usages work (bug fix: previously this
            # wrapped None and crashed at invocation time)
            return self.log_metrics

        @functools.wraps(lambda_handler)
        def decorate(*args, **kwargs):
            try:
                response = lambda_handler(*args, **kwargs)
            finally:
                # Publish even when the handler raises, so metrics recorded
                # before the failure are not lost
                metrics = self.serialize_metric_set()
                logger.debug("Publishing metrics: %s", metrics)
                print(json.dumps(metrics))

            return response

        return decorate
Node", + "required": [ + "_aws" + ], + "properties": { + "_aws": { + "$id": "#/properties/_aws", + "type": "object", + "title": "Metadata", + "required": [ + "Timestamp", + "CloudWatchMetrics" + ], + "properties": { + "Timestamp": { + "$id": "#/properties/_aws/properties/Timestamp", + "type": "integer", + "title": "The Timestamp Schema", + "examples": [ + 1565375354953 + ] + }, + "CloudWatchMetrics": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics", + "type": "array", + "title": "MetricDirectives", + "items": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items", + "type": "object", + "title": "MetricDirective", + "required": [ + "Namespace", + "Dimensions", + "Metrics" + ], + "properties": { + "Namespace": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Namespace", + "type": "string", + "title": "CloudWatch Metrics Namespace", + "examples": [ + "MyApp" + ], + "pattern": "^(.*)$", + "minLength": 1 + }, + "Dimensions": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions", + "type": "array", + "title": "The Dimensions Schema", + "minItems": 1, + "items": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items", + "type": "array", + "title": "DimensionSet", + "minItems": 1, + "maxItems": 9, + "items": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Dimensions/items/items", + "type": "string", + "title": "DimensionReference", + "examples": [ + "Operation" + ], + "pattern": "^(.*)$", + "minItems": 1 + } + } + }, + "Metrics": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics", + "type": "array", + "title": "MetricDefinitions", + "minItems": 1, + "items": { + "$id": "#/properties/_aws/properties/CloudWatchMetrics/items/properties/Metrics/items", + "type": "object", + "title": "MetricDefinition", + "required": [ + "Name" + ], + "minItems": 1, + "properties": { + "Name": { + "$id": 
#!/bin/bash
# Build manylinux wheels for aws_lambda_powertools inside the
# quay.io/pypa/manylinux1_x86_64 container (invoked by `make build-linux-wheels`,
# which mounts the repo at /io and sets PLAT).
set -e -x

# NOTE(review): atlas-devel comes from the stock manylinux demo script;
# confirm this system package is actually needed to build this library
yum install -y atlas-devel

VERSIONS=(cp36-cp36m cp37-cp37m cp38-cp38)

# Compile a wheel for each supported CPython version
# (quote array expansion to avoid accidental word splitting)
for VERSION in "${VERSIONS[@]}"; do
    "/opt/python/${VERSION}/bin/pip" wheel /io/ -w wheelhouse/
done

# Bundle external shared libraries into the wheels
for whl in wheelhouse/*.whl; do
    auditwheel repair "$whl" --plat "$PLAT" -w /io/wheelhouse/
done

# Smoke-test: install the package from the built wheels across all versions
for VERSION in "${VERSIONS[@]}"; do
    echo "Installing ------ ${VERSION}"
    "/opt/python/${VERSION}/bin/pip" install aws_lambda_powertools --no-index -f /io/wheelhouse
done
logger_setup from aws_lambda_powertools.tracing import Tracer - -# import requests +from aws_lambda_powertools.metrics import Metrics, MetricUnit, single_metric +import requests tracer = Tracer() logger = logger_setup() +metrics = Metrics() + +_cold_start = True + +metrics.add_dimension(name="operation", value="example") +@metrics.log_metrics @tracer.capture_lambda_handler @logger_inject_lambda_context def lambda_handler(event, context): @@ -31,21 +37,26 @@ def lambda_handler(event, context): Return doc: https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html """ - - logger.debug("Fetching current location") - # try: - # ip = requests.get("http://checkip.amazonaws.com/") - # except requests.RequestException as e: - # # Send some context about this error to Lambda Logs - # print(e) - - # raise e + global _cold_start + if _cold_start: + logger.debug("Recording cold start metric") + metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) + metrics.add_dimension(name="function_name", value=context.function_name) + _cold_start = False + + try: + ip = requests.get("http://checkip.amazonaws.com/") + metrics.add_metric(name="SuccessfulLocations", unit="Count", value=1) + except requests.RequestException as e: + # Send some context about this error to Lambda Logs + logger.error(e) + raise e logger.info("Returning message to the caller") return { "statusCode": 200, "body": json.dumps({ "message": "hello world", - # "location": ip.text.replace("\n", "") + "location": ip.text.replace("\n", "") }), } diff --git a/python/example/hello_world/requirements.txt b/python/example/hello_world/requirements.txt index e08d1938968..0241ab2efa6 100644 --- a/python/example/hello_world/requirements.txt +++ b/python/example/hello_world/requirements.txt @@ -1,2 +1,2 @@ aws-lambda-powertools -requests +requests \ No newline at end of file diff --git a/python/example/samconfig.toml b/python/example/samconfig.toml new file mode 100644 index 
00000000000..91e4dfe3615 --- /dev/null +++ b/python/example/samconfig.toml @@ -0,0 +1,9 @@ +version = 0.1 +[default] +[default.deploy] +[default.deploy.parameters] +stack_name = "example-powertools" +s3_bucket = "aws-sam-cli-managed-default-samclisourcebucket-1pssy5gdxqcao" +s3_prefix = "example-powertools" +region = "eu-west-1" +capabilities = "CAPABILITY_IAM" diff --git a/python/example/template.yaml b/python/example/template.yaml index 5e7d6e99928..c3f53b108d1 100644 --- a/python/example/template.yaml +++ b/python/example/template.yaml @@ -16,15 +16,16 @@ Resources: Properties: CodeUri: hello_world/ Handler: app.lambda_handler - Runtime: python3.8 + Runtime: python3.7 Tracing: Active Environment: Variables: - POWERTOOLS_SERVICE_NAME: example # Sets service name used for tracing namespace, metrics dimensions and structured logging - POWERTOOLS_TRACE_DISABLED: "false" # Explicitly disables tracing - POWERTOOLS_LOGGER_LOG_EVENT: "false" # Logs incoming event - POWERTOOLS_LOGGER_SAMPLE_RATE: "0" # Debug log sampling percentage - LOG_LEVEL: INFO # Log level (INFO, DEBUG, etc.) + POWERTOOLS_SERVICE_NAME: example # Sets service name used for tracing namespace, metrics dimensions and structured logging + POWERTOOLS_TRACE_DISABLED: "false" # Explicitly disables tracing + POWERTOOLS_LOGGER_LOG_EVENT: "false" # Logs incoming event + POWERTOOLS_LOGGER_SAMPLE_RATE: "0" # Debug log sampling percentage + POWERTOOLS_METRICS_NAMESPACE: "Example" # Debug log sampling percentage + LOG_LEVEL: INFO # Log level (INFO, DEBUG, etc.) Events: HelloWorld: Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api diff --git a/python/poetry.lock b/python/poetry.lock index fa8f9d45f9e..118fd5b1823 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -69,7 +69,7 @@ description = "Low-level, data-driven core of boto 3." 
name = "botocore" optional = false python-versions = "*" -version = "1.15.5" +version = "1.15.37" [package.dependencies] docutils = ">=0.10,<0.16" @@ -93,8 +93,8 @@ category = "dev" description = "Composable command line interface toolkit" name = "click" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "7.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "7.1.1" [[package]] category = "dev" @@ -106,12 +106,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "0.4.3" [[package]] -category = "main" +category = "dev" description = "Code coverage measurement for Python" name = "coverage" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.0.3" +version = "5.0.4" [package.dependencies] [package.dependencies.toml] @@ -153,6 +153,17 @@ optional = false python-versions = "*" version = "1.0" +[[package]] +category = "main" +description = "Fastest Python implementation of JSON schema" +name = "fastjsonschema" +optional = false +python-versions = "*" +version = "2.14.4" + +[package.extras] +devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] category = "dev" description = "A platform independent file lock." @@ -205,7 +216,7 @@ description = "Check for python builtins being used as variables or parameters." name = "flake8-builtins" optional = false python-versions = "*" -version = "1.4.2" +version = "1.5.2" [package.dependencies] flake8 = "*" @@ -267,7 +278,7 @@ description = "flake8 plugin that integrates isort ." 
name = "flake8-isort" optional = false python-versions = "*" -version = "2.8.0" +version = "2.9.1" [package.dependencies] flake8 = ">=3.2.1" @@ -275,7 +286,7 @@ testfixtures = "*" [package.dependencies.isort] extras = ["pyproject"] -version = ">=4.3.0" +version = ">=4.3.5" [package.extras] test = ["pytest"] @@ -305,7 +316,7 @@ description = "File identification library for Python" name = "identify" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "1.4.11" +version = "1.4.14" [package.extras] license = ["editdistance"] @@ -317,7 +328,7 @@ marker = "python_version < \"3.8\"" name = "importlib-metadata" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "1.5.0" +version = "1.6.0" [package.dependencies] zipp = ">=0.5" @@ -332,8 +343,20 @@ description = "Read resources from Python packages" marker = "python_version < \"3.7\"" name = "importlib-resources" optional = false -python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" -version = "1.0.2" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "1.4.0" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[package.dependencies.zipp] +python = "<3.8" +version = ">=0.4" + +[package.extras] +docs = ["sphinx", "rst.linker", "jaraco.packaging"] [[package]] category = "dev" @@ -355,7 +378,7 @@ description = "JSON Matching Expressions" name = "jmespath" optional = false python-versions = "*" -version = "0.9.4" +version = "0.9.5" [[package]] category = "main" @@ -365,6 +388,43 @@ optional = false python-versions = "*" version = "1.3" +[[package]] +category = "dev" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+name = "mako" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.2" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["babel"] +lingua = ["lingua"] + +[[package]] +category = "dev" +description = "Python implementation of Markdown." +name = "markdown" +optional = false +python-versions = ">=3.5" +version = "3.2.1" + +[package.dependencies] +setuptools = ">=36" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +category = "dev" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.1" + [[package]] category = "dev" description = "McCabe checker, plugin for flake8" @@ -395,7 +455,7 @@ description = "Core utilities for Python packages" name = "packaging" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.1" +version = "20.3" [package.dependencies] pyparsing = ">=2.0.2" @@ -409,6 +469,18 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "0.7.0" +[[package]] +category = "dev" +description = "Auto-generate API documentation for Python projects." 
+name = "pdoc3" +optional = false +python-versions = ">= 3.5" +version = "0.7.5" + +[package.dependencies] +mako = "*" +markdown = ">=3.0" + [[package]] category = "dev" description = "plugin and hook calling mechanisms for python" @@ -431,7 +503,7 @@ description = "A framework for managing and maintaining multi-language pre-commi name = "pre-commit" optional = false python-versions = ">=3.6" -version = "2.1.0" +version = "2.1.1" [package.dependencies] cfgv = ">=2.0.0" @@ -479,7 +551,7 @@ description = "Python parsing module" name = "pyparsing" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.6" +version = "2.4.7" [[package]] category = "dev" @@ -487,7 +559,7 @@ description = "pytest: simple powerful testing with Python" name = "pytest" optional = false python-versions = ">=3.5" -version = "5.3.5" +version = "5.4.1" [package.dependencies] atomicwrites = ">=1.0" @@ -553,7 +625,7 @@ description = "YAML parser and emitter for Python" name = "pyyaml" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3" +version = "5.3.1" [[package]] category = "dev" @@ -561,7 +633,7 @@ description = "Alternative regular expression module, to replace re." 
name = "regex" optional = false python-versions = "*" -version = "2020.2.20" +version = "2020.4.4" [[package]] category = "main" @@ -577,7 +649,7 @@ description = "A collection of helpers and mock objects for unit tests and doc t name = "testfixtures" optional = false python-versions = "*" -version = "6.13.1" +version = "6.14.0" [package.extras] build = ["setuptools-git", "wheel", "twine"] @@ -585,7 +657,7 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] -category = "main" +category = "dev" description = "Python Library for Tom's Obvious, Minimal Language" name = "toml" optional = false @@ -620,7 +692,7 @@ description = "Virtual Python Environment builder" name = "virtualenv" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "20.0.5" +version = "20.0.16" [package.dependencies] appdirs = ">=1.4.3,<2" @@ -638,7 +710,7 @@ version = ">=1.0,<2" [package.extras] docs = ["sphinx (>=2.0.0,<3)", "sphinx-argparse (>=0.2.5,<1)", "sphinx-rtd-theme (>=0.4.3,<1)", "towncrier (>=19.9.0rc1)", "proselint (>=0.10.2,<1)"] -testing = ["pytest (>=4.0.0,<6)", "coverage (>=4.5.1,<6)", "pytest-mock (>=2.0.0,<3)", "pytest-env (>=0.6.2,<1)", "packaging (>=20.0)", "xonsh (>=0.9.13,<1)"] +testing = ["pytest (>=4.0.0,<6)", "coverage (>=4.5.1,<6)", "pytest-mock (>=2.0.0,<3)", "pytest-env (>=0.6.2,<1)", "pytest-timeout (>=1.3.4,<2)", "packaging (>=20.0)", "xonsh (>=0.9.13,<1)"] [[package]] category = "dev" @@ -646,7 +718,7 @@ description = "Measures number of Terminal column cells of wide-character codes" name = "wcwidth" optional = false python-versions = "*" -version = "0.1.8" +version = "0.1.9" [[package]] category = "main" @@ -654,7 +726,7 @@ description = "Module for decorators, wrappers and monkey patching." 
name = "wrapt" optional = false python-versions = "*" -version = "1.12.0" +version = "1.12.1" [[package]] category = "dev" @@ -663,14 +735,14 @@ marker = "python_version < \"3.8\"" name = "zipp" optional = false python-versions = ">=3.6" -version = "3.0.0" +version = "3.1.0" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] testing = ["jaraco.itertools", "func-timeout"] [metadata] -content-hash = "7bb35f84fdacf1eacc6fc98c3b3f3170b7bd659fef3ebfa91d707ea004592bca" +content-hash = "227b1d21877d1391dc50a8733d507226afd95471e77554328f9b2a3c2403b7fe" python-versions = "^3.6" [metadata.files] @@ -695,53 +767,53 @@ black = [ {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] botocore = [ - {file = "botocore-1.15.5-py2.py3-none-any.whl", hash = "sha256:8c9fa943e1890b44a7f31be2654cd4f4f88e634adadb931b0f298f1cf03a52a4"}, - {file = "botocore-1.15.5.tar.gz", hash = "sha256:9de7885e9e9d6dbc30b9846d7edd8f98251db7f00395a6f4cfd765efc7098bdb"}, + {file = "botocore-1.15.37-py2.py3-none-any.whl", hash = "sha256:30055e9a3e313400d92ca4ad599e6506d71fb1addc75f075ab7179973ac52de6"}, + {file = "botocore-1.15.37.tar.gz", hash = "sha256:51422695a5a39ca9320acd3edaf7b337bed75bbc7d260deb76c1d801adc0daa2"}, ] cfgv = [ {file = "cfgv-3.0.0-py2.py3-none-any.whl", hash = "sha256:f22b426ed59cd2ab2b54ff96608d846c33dfb8766a67f0b4a6ce130ce244414f"}, {file = "cfgv-3.0.0.tar.gz", hash = "sha256:04b093b14ddf9fd4d17c53ebfd55582d27b76ed30050193c14e560770c5360eb"}, ] click = [ - {file = "Click-7.0-py2.py3-none-any.whl", hash = "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13"}, - {file = "Click-7.0.tar.gz", hash = "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"}, + {file = "click-7.1.1-py2.py3-none-any.whl", hash = "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a"}, + {file = "click-7.1.1.tar.gz", hash = 
"sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc"}, ] colorama = [ {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, ] coverage = [ - {file = "coverage-5.0.3-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f"}, - {file = "coverage-5.0.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc"}, - {file = "coverage-5.0.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a"}, - {file = "coverage-5.0.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52"}, - {file = "coverage-5.0.3-cp27-cp27m-win32.whl", hash = "sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c"}, - {file = "coverage-5.0.3-cp27-cp27m-win_amd64.whl", hash = "sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73"}, - {file = "coverage-5.0.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68"}, - {file = "coverage-5.0.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691"}, - {file = "coverage-5.0.3-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301"}, - {file = "coverage-5.0.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf"}, - {file = "coverage-5.0.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3"}, - {file = "coverage-5.0.3-cp35-cp35m-win32.whl", hash = 
"sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0"}, - {file = "coverage-5.0.3-cp35-cp35m-win_amd64.whl", hash = "sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0"}, - {file = "coverage-5.0.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2"}, - {file = "coverage-5.0.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894"}, - {file = "coverage-5.0.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf"}, - {file = "coverage-5.0.3-cp36-cp36m-win32.whl", hash = "sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477"}, - {file = "coverage-5.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc"}, - {file = "coverage-5.0.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8"}, - {file = "coverage-5.0.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987"}, - {file = "coverage-5.0.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea"}, - {file = "coverage-5.0.3-cp37-cp37m-win32.whl", hash = "sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc"}, - {file = "coverage-5.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e"}, - {file = "coverage-5.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb"}, - {file = "coverage-5.0.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37"}, - {file = "coverage-5.0.3-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d"}, - {file = "coverage-5.0.3-cp38-cp38m-win32.whl", hash = "sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954"}, - {file = "coverage-5.0.3-cp38-cp38m-win_amd64.whl", hash = "sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e"}, - {file = "coverage-5.0.3-cp39-cp39m-win32.whl", hash = "sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40"}, - {file = "coverage-5.0.3-cp39-cp39m-win_amd64.whl", hash = "sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af"}, - {file = "coverage-5.0.3.tar.gz", hash = "sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef"}, + {file = "coverage-5.0.4-cp27-cp27m-macosx_10_12_x86_64.whl", hash = "sha256:8a620767b8209f3446197c0e29ba895d75a1e272a36af0786ec70fe7834e4307"}, + {file = "coverage-5.0.4-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:73aa6e86034dad9f00f4bbf5a666a889d17d79db73bc5af04abd6c20a014d9c8"}, + {file = "coverage-5.0.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:408ce64078398b2ee2ec08199ea3fcf382828d2f8a19c5a5ba2946fe5ddc6c31"}, + {file = "coverage-5.0.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cda33311cb9fb9323958a69499a667bd728a39a7aa4718d7622597a44c4f1441"}, + {file = "coverage-5.0.4-cp27-cp27m-win32.whl", hash = "sha256:5f587dfd83cb669933186661a351ad6fc7166273bc3e3a1531ec5c783d997aac"}, + {file = "coverage-5.0.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9fad78c13e71546a76c2f8789623eec8e499f8d2d799f4b4547162ce0a4df435"}, + {file = "coverage-5.0.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2e08c32cbede4a29e2a701822291ae2bc9b5220a971bba9d1e7615312efd3037"}, + {file = "coverage-5.0.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:922fb9ef2c67c3ab20e22948dcfd783397e4c043a5c5fa5ff5e9df5529074b0a"}, + {file = "coverage-5.0.4-cp35-cp35m-macosx_10_12_x86_64.whl", hash = 
"sha256:c3fc325ce4cbf902d05a80daa47b645d07e796a80682c1c5800d6ac5045193e5"}, + {file = "coverage-5.0.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:046a1a742e66d065d16fb564a26c2a15867f17695e7f3d358d7b1ad8a61bca30"}, + {file = "coverage-5.0.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6ad6ca45e9e92c05295f638e78cd42bfaaf8ee07878c9ed73e93190b26c125f7"}, + {file = "coverage-5.0.4-cp35-cp35m-win32.whl", hash = "sha256:eda55e6e9ea258f5e4add23bcf33dc53b2c319e70806e180aecbff8d90ea24de"}, + {file = "coverage-5.0.4-cp35-cp35m-win_amd64.whl", hash = "sha256:4a8a259bf990044351baf69d3b23e575699dd60b18460c71e81dc565f5819ac1"}, + {file = "coverage-5.0.4-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:f372cdbb240e09ee855735b9d85e7f50730dcfb6296b74b95a3e5dea0615c4c1"}, + {file = "coverage-5.0.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a37c6233b28e5bc340054cf6170e7090a4e85069513320275a4dc929144dccf0"}, + {file = "coverage-5.0.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:443be7602c790960b9514567917af538cac7807a7c0c0727c4d2bbd4014920fd"}, + {file = "coverage-5.0.4-cp36-cp36m-win32.whl", hash = "sha256:165a48268bfb5a77e2d9dbb80de7ea917332a79c7adb747bd005b3a07ff8caf0"}, + {file = "coverage-5.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:0a907199566269e1cfa304325cc3b45c72ae341fbb3253ddde19fa820ded7a8b"}, + {file = "coverage-5.0.4-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:513e6526e0082c59a984448f4104c9bf346c2da9961779ede1fc458e8e8a1f78"}, + {file = "coverage-5.0.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3844c3dab800ca8536f75ae89f3cf566848a3eb2af4d9f7b1103b4f4f7a5dad6"}, + {file = "coverage-5.0.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:641e329e7f2c01531c45c687efcec8aeca2a78a4ff26d49184dce3d53fc35014"}, + {file = "coverage-5.0.4-cp37-cp37m-win32.whl", hash = "sha256:db1d4e38c9b15be1521722e946ee24f6db95b189d1447fa9ff18dd16ba89f732"}, + {file = "coverage-5.0.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:62061e87071497951155cbccee487980524d7abea647a1b2a6eb6b9647df9006"}, + {file = "coverage-5.0.4-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:65a7e00c00472cd0f59ae09d2fb8a8aaae7f4a0cf54b2b74f3138d9f9ceb9cb2"}, + {file = "coverage-5.0.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1f66cf263ec77af5b8fe14ef14c5e46e2eb4a795ac495ad7c03adc72ae43fafe"}, + {file = "coverage-5.0.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:85596aa5d9aac1bf39fe39d9fa1051b0f00823982a1de5766e35d495b4a36ca9"}, + {file = "coverage-5.0.4-cp38-cp38-win32.whl", hash = "sha256:86a0ea78fd851b313b2e712266f663e13b6bc78c2fb260b079e8b67d970474b1"}, + {file = "coverage-5.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:03f630aba2b9b0d69871c2e8d23a69b7fe94a1e2f5f10df5049c0df99db639a0"}, + {file = "coverage-5.0.4-cp39-cp39-win32.whl", hash = "sha256:7c9762f80a25d8d0e4ab3cb1af5d9dffbddb3ee5d21c43e3474c84bf5ff941f7"}, + {file = "coverage-5.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4482f69e0701139d0f2c44f3c395d1d1d37abd81bfafbf9b6efbe2542679d892"}, + {file = "coverage-5.0.4.tar.gz", hash = "sha256:1b60a95fc995649464e0cd48cecc8288bac5f4198f21d04b8229dc4097d76823"}, ] distlib = [ {file = "distlib-0.3.0.zip", hash = "sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21"}, @@ -758,6 +830,10 @@ entrypoints = [ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] +fastjsonschema = [ + {file = "fastjsonschema-2.14.4-py3-none-any.whl", hash = "sha256:02a39b518077cc73c1a537f27776527dc6c1e5012d530eb8ac0d1062efbabff7"}, + {file = "fastjsonschema-2.14.4.tar.gz", hash = "sha256:7292cde54f1c30172f78557509ad4cb152f374087fc844bd113a83e2ac494dd6"}, +] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, @@ -774,8 
+850,8 @@ flake8-bugbear = [ {file = "flake8_bugbear-20.1.4-py36.py37.py38-none-any.whl", hash = "sha256:a3ddc03ec28ba2296fc6f89444d1c946a6b76460f859795b35b77d4920a51b63"}, ] flake8-builtins = [ - {file = "flake8-builtins-1.4.2.tar.gz", hash = "sha256:c44415fb19162ef3737056e700d5b99d48c3612a533943b4e16419a5d3de3a64"}, - {file = "flake8_builtins-1.4.2-py2.py3-none-any.whl", hash = "sha256:29bc0f7e68af481d088f5c96f8aeb02520abdfc900500484e3af969f42a38a5f"}, + {file = "flake8-builtins-1.5.2.tar.gz", hash = "sha256:fe7be13fe51bfb06bdae6096c6488e328c822c3aa080e24b91b77116a4fbb8b0"}, + {file = "flake8_builtins-1.5.2-py2.py3-none-any.whl", hash = "sha256:a0296d23da92a6f2494243b9f2039bfdb73f34aba20054c1b70b2a60c84745bb"}, ] flake8-comprehensions = [ {file = "flake8-comprehensions-3.2.2.tar.gz", hash = "sha256:e7db586bb6eb95afdfd87ed244c90e57ae1352db8ef0ad3012fca0200421e5df"}, @@ -793,8 +869,8 @@ flake8-fixme = [ {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, ] flake8-isort = [ - {file = "flake8-isort-2.8.0.tar.gz", hash = "sha256:64454d1f154a303cfe23ee715aca37271d4f1d299b2f2663f45b73bff14e36a9"}, - {file = "flake8_isort-2.8.0-py2.py3-none-any.whl", hash = "sha256:aa0c4d004e6be47e74f122f5b7f36554d0d78ad8bf99b497a460dedccaa7cce9"}, + {file = "flake8-isort-2.9.1.tar.gz", hash = "sha256:0d34b266080e1748412b203a1690792245011706b1858c203476b43460bf3652"}, + {file = "flake8_isort-2.9.1-py2.py3-none-any.whl", hash = "sha256:a77df28778a1ac6ac4153339ebd9d252935f3ed4379872d4f8b84986296d8cc3"}, ] flake8-variables-names = [ {file = "flake8_variables_names-0.0.3.tar.gz", hash = "sha256:d109f5a8fe8c20d64e165287330f1b0160b442d7f96e1527124ba1b63c438347"}, @@ -803,29 +879,72 @@ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] identify = [ - {file = "identify-1.4.11-py2.py3-none-any.whl", hash = 
"sha256:1222b648251bdcb8deb240b294f450fbf704c7984e08baa92507e4ea10b436d5"}, - {file = "identify-1.4.11.tar.gz", hash = "sha256:d824ebe21f38325c771c41b08a95a761db1982f1fc0eee37c6c97df3f1636b96"}, + {file = "identify-1.4.14-py2.py3-none-any.whl", hash = "sha256:2bb8760d97d8df4408f4e805883dad26a2d076f04be92a10a3e43f09c6060742"}, + {file = "identify-1.4.14.tar.gz", hash = "sha256:faffea0fd8ec86bb146ac538ac350ed0c73908326426d387eded0bcc9d077522"}, ] importlib-metadata = [ - {file = "importlib_metadata-1.5.0-py2.py3-none-any.whl", hash = "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"}, - {file = "importlib_metadata-1.5.0.tar.gz", hash = "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302"}, + {file = "importlib_metadata-1.6.0-py2.py3-none-any.whl", hash = "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f"}, + {file = "importlib_metadata-1.6.0.tar.gz", hash = "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"}, ] importlib-resources = [ - {file = "importlib_resources-1.0.2-py2.py3-none-any.whl", hash = "sha256:6e2783b2538bd5a14678284a3962b0660c715e5a0f10243fd5e00a4b5974f50b"}, - {file = "importlib_resources-1.0.2.tar.gz", hash = "sha256:d3279fd0f6f847cced9f7acc19bd3e5df54d34f93a2e7bb5f238f81545787078"}, + {file = "importlib_resources-1.4.0-py2.py3-none-any.whl", hash = "sha256:dd98ceeef3f5ad2ef4cc287b8586da4ebad15877f351e9688987ad663a0a29b8"}, + {file = "importlib_resources-1.4.0.tar.gz", hash = "sha256:4019b6a9082d8ada9def02bece4a76b131518866790d58fdda0b5f8c603b36c2"}, ] isort = [ {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"}, {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, ] jmespath = [ - {file = "jmespath-0.9.4-py2.py3-none-any.whl", hash = "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6"}, - {file = 
"jmespath-0.9.4.tar.gz", hash = "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"}, + {file = "jmespath-0.9.5-py2.py3-none-any.whl", hash = "sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec"}, + {file = "jmespath-0.9.5.tar.gz", hash = "sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9"}, ] jsonpickle = [ {file = "jsonpickle-1.3-py2.py3-none-any.whl", hash = "sha256:efc6839cb341985f0c24f98650a4c1063a2877c236ffd3d7e1662f0c482bac93"}, {file = "jsonpickle-1.3.tar.gz", hash = "sha256:71bca2b80ae28af4e3f86629ef247100af7f97032b5ca8d791c1f8725b411d95"}, ] +mako = [ + {file = "Mako-1.1.2-py2.py3-none-any.whl", hash = "sha256:8e8b53c71c7e59f3de716b6832c4e401d903af574f6962edbbbf6ecc2a5fe6c9"}, + {file = "Mako-1.1.2.tar.gz", hash = "sha256:3139c5d64aa5d175dbafb95027057128b5fbd05a40c53999f3905ceb53366d9d"}, +] +markdown = [ + {file = "Markdown-3.2.1-py2.py3-none-any.whl", hash = "sha256:e4795399163109457d4c5af2183fbe6b60326c17cfdf25ce6e7474c6624f725d"}, + {file = "Markdown-3.2.1.tar.gz", hash = "sha256:90fee683eeabe1a92e149f7ba74e5ccdc81cd397bd6c516d93a8da0ef90b6902"}, +] +markupsafe = [ + {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, + {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = 
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -838,20 +957,23 @@ nodeenv = [ {file = "nodeenv-1.3.5-py2.py3-none-any.whl", hash = "sha256:5b2438f2e42af54ca968dd1b374d14a1194848955187b0e5e4be1f73813a5212"}, ] packaging = [ - {file = "packaging-20.1-py2.py3-none-any.whl", hash = "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73"}, - {file = "packaging-20.1.tar.gz", hash = "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334"}, + {file = "packaging-20.3-py2.py3-none-any.whl", hash = "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"}, + {file = "packaging-20.3.tar.gz", hash = "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3"}, ] pathspec = [ {file = "pathspec-0.7.0-py2.py3-none-any.whl", hash = "sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424"}, {file = "pathspec-0.7.0.tar.gz", hash = "sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96"}, ] +pdoc3 = [ + {file = "pdoc3-0.7.5.tar.gz", hash = "sha256:ebca75b7fcf23f3b4320abe23339834d3f08c28517718e9d29e555fc38eeb33c"}, +] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.1.0-py2.py3-none-any.whl", hash = "sha256:5387b53bb84ad9abc9b0845775dddd4e3243fd64cdcddaa6db28d3da6fbf06c2"}, - {file = "pre_commit-2.1.0.tar.gz", hash = "sha256:5295fb6d652a6c5e0b4636cd2c73183efdf253d45b657ce7367183134e806fe1"}, + {file = "pre_commit-2.1.1-py2.py3-none-any.whl", hash = "sha256:09ebe467f43ce24377f8c2f200fe3cd2570d328eb2ce0568c8e96ce19da45fa6"}, + {file = "pre_commit-2.1.1.tar.gz", hash = 
"sha256:f8d555e31e2051892c7f7b3ad9f620bd2c09271d87e9eedb2ad831737d6211eb"}, ] py = [ {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, @@ -866,12 +988,12 @@ pyflakes = [ {file = "pyflakes-2.1.1.tar.gz", hash = "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"}, ] pyparsing = [ - {file = "pyparsing-2.4.6-py2.py3-none-any.whl", hash = "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"}, - {file = "pyparsing-2.4.6.tar.gz", hash = "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f"}, + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pytest = [ - {file = "pytest-5.3.5-py3-none-any.whl", hash = "sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6"}, - {file = "pytest-5.3.5.tar.gz", hash = "sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d"}, + {file = "pytest-5.4.1-py3-none-any.whl", hash = "sha256:0e5b30f5cb04e887b91b1ee519fa3d89049595f428c1db76e73bd7f17b09b172"}, + {file = "pytest-5.4.1.tar.gz", hash = "sha256:84dde37075b8805f3d1f392cc47e38a0e59518fb46a431cfdaf7cf1ce805f970"}, ] pytest-cov = [ {file = "pytest-cov-2.8.1.tar.gz", hash = "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b"}, @@ -886,48 +1008,48 @@ python-dateutil = [ {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] pyyaml = [ - {file = "PyYAML-5.3-cp27-cp27m-win32.whl", hash = "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d"}, - {file = "PyYAML-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6"}, - {file = 
"PyYAML-5.3-cp35-cp35m-win32.whl", hash = "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e"}, - {file = "PyYAML-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689"}, - {file = "PyYAML-5.3-cp36-cp36m-win32.whl", hash = "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994"}, - {file = "PyYAML-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e"}, - {file = "PyYAML-5.3-cp37-cp37m-win32.whl", hash = "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5"}, - {file = "PyYAML-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf"}, - {file = "PyYAML-5.3-cp38-cp38-win32.whl", hash = "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811"}, - {file = "PyYAML-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20"}, - {file = "PyYAML-5.3.tar.gz", hash = "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615"}, + {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, + {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, + {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, + {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, + {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, + {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, + {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = 
"sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, + {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, + {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, + {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, + {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] regex = [ - {file = "regex-2020.2.20-cp27-cp27m-win32.whl", hash = "sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb"}, - {file = "regex-2020.2.20-cp27-cp27m-win_amd64.whl", hash = "sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74"}, - {file = "regex-2020.2.20-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400"}, - {file = "regex-2020.2.20-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0"}, - {file = "regex-2020.2.20-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc"}, - {file = "regex-2020.2.20-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0"}, - {file = "regex-2020.2.20-cp36-cp36m-win32.whl", hash = "sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69"}, - {file = "regex-2020.2.20-cp36-cp36m-win_amd64.whl", hash = "sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b"}, - {file = "regex-2020.2.20-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e"}, - {file = "regex-2020.2.20-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242"}, - {file = "regex-2020.2.20-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce"}, - {file = "regex-2020.2.20-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab"}, - {file = "regex-2020.2.20-cp37-cp37m-win32.whl", hash = "sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431"}, - {file = "regex-2020.2.20-cp37-cp37m-win_amd64.whl", hash = "sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1"}, - {file = "regex-2020.2.20-cp38-cp38-manylinux1_i686.whl", hash = "sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045"}, - {file = "regex-2020.2.20-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26"}, - {file = "regex-2020.2.20-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2"}, - {file = "regex-2020.2.20-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70"}, - {file = "regex-2020.2.20-cp38-cp38-win32.whl", hash = "sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d"}, - {file = "regex-2020.2.20-cp38-cp38-win_amd64.whl", hash = "sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa"}, - {file = "regex-2020.2.20.tar.gz", hash = "sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5"}, + {file = "regex-2020.4.4-cp27-cp27m-win32.whl", hash = "sha256:90742c6ff121a9c5b261b9b215cb476eea97df98ea82037ec8ac95d1be7a034f"}, + {file = "regex-2020.4.4-cp27-cp27m-win_amd64.whl", hash = "sha256:24f4f4062eb16c5bbfff6a22312e8eab92c2c99c51a02e39b4eae54ce8255cd1"}, + {file = "regex-2020.4.4-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:08119f707f0ebf2da60d2f24c2f39ca616277bb67ef6c92b72cbf90cbe3a556b"}, + {file = "regex-2020.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:c9423a150d3a4fc0f3f2aae897a59919acd293f4cb397429b120a5fcd96ea3db"}, + {file = "regex-2020.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c087bff162158536387c53647411db09b6ee3f9603c334c90943e97b1052a156"}, + {file = "regex-2020.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:1cbe0fa0b7f673400eb29e9ef41d4f53638f65f9a2143854de6b1ce2899185c3"}, + {file = "regex-2020.4.4-cp36-cp36m-win32.whl", hash = "sha256:0ce9537396d8f556bcfc317c65b6a0705320701e5ce511f05fc04421ba05b8a8"}, + {file = "regex-2020.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:7e1037073b1b7053ee74c3c6c0ada80f3501ec29d5f46e42669378eae6d4405a"}, + {file = "regex-2020.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4385f12aa289d79419fede43f979e372f527892ac44a541b5446617e4406c468"}, + {file = "regex-2020.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a58dd45cb865be0ce1d5ecc4cfc85cd8c6867bea66733623e54bd95131f473b6"}, + {file = "regex-2020.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ccccdd84912875e34c5ad2d06e1989d890d43af6c2242c6fcfa51556997af6cd"}, + {file = "regex-2020.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ea4adf02d23b437684cd388d557bf76e3afa72f7fed5bbc013482cc00c816948"}, + {file = "regex-2020.4.4-cp37-cp37m-win32.whl", hash = "sha256:2294f8b70e058a2553cd009df003a20802ef75b3c629506be20687df0908177e"}, + {file = "regex-2020.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:e91ba11da11cf770f389e47c3f5c30473e6d85e06d7fd9dcba0017d2867aab4a"}, + {file = "regex-2020.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5635cd1ed0a12b4c42cce18a8d2fb53ff13ff537f09de5fd791e97de27b6400e"}, + {file = "regex-2020.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:23069d9c07e115537f37270d1d5faea3e0bdded8279081c4d4d607a2ad393683"}, + {file = "regex-2020.4.4-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:c162a21e0da33eb3d31a3ac17a51db5e634fc347f650d271f0305d96601dc15b"}, + {file = "regex-2020.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:fb95debbd1a824b2c4376932f2216cc186912e389bdb0e27147778cf6acb3f89"}, + {file = "regex-2020.4.4-cp38-cp38-win32.whl", hash = "sha256:2a3bf8b48f8e37c3a40bb3f854bf0121c194e69a650b209628d951190b862de3"}, + {file = "regex-2020.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bfed051dbff32fd8945eccca70f5e22b55e4148d2a8a45141a3b053d6455ae3"}, + {file = "regex-2020.4.4.tar.gz", hash = "sha256:295badf61a51add2d428a46b8580309c520d8b26e769868b922750cf3ce67142"}, ] six = [ {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"}, {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"}, ] testfixtures = [ - {file = "testfixtures-6.13.1-py2.py3-none-any.whl", hash = "sha256:ed7f36c5d426a4271368569d3b2d35590eeaf340ee9653cc3dd06ba2e21c6ba8"}, - {file = "testfixtures-6.13.1.tar.gz", hash = "sha256:6edef5d0d39b4a11ada53307379c07c38d69db4a684d40d4e3e08ec7ba66187e"}, + {file = "testfixtures-6.14.0-py2.py3-none-any.whl", hash = "sha256:799144b3cbef7b072452d9c36cbd024fef415ab42924b96aad49dfd9c763de66"}, + {file = "testfixtures-6.14.0.tar.gz", hash = "sha256:cdfc3d73cb6d3d4dc3c67af84d912e86bf117d30ae25f02fe823382ef99383d2"}, ] toml = [ {file = "toml-0.10.0-py2.7.egg", hash = "sha256:f1db651f9657708513243e61e6cc67d101a39bad662eaa9b5546f789338e07a3"}, @@ -962,17 +1084,17 @@ urllib3 = [ {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"}, ] virtualenv = [ - {file = "virtualenv-20.0.5-py2.py3-none-any.whl", hash = "sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e"}, - {file = "virtualenv-20.0.5.tar.gz", hash = "sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae"}, + {file = "virtualenv-20.0.16-py2.py3-none-any.whl", 
hash = "sha256:94f647e12d1e6ced2541b93215e51752aecbd1bbb18eb1816e2867f7532b1fe1"}, + {file = "virtualenv-20.0.16.tar.gz", hash = "sha256:6ea131d41c477f6c4b7863948a9a54f7fa196854dbef73efbdff32b509f4d8bf"}, ] wcwidth = [ - {file = "wcwidth-0.1.8-py2.py3-none-any.whl", hash = "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603"}, - {file = "wcwidth-0.1.8.tar.gz", hash = "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"}, + {file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"}, + {file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"}, ] wrapt = [ - {file = "wrapt-1.12.0.tar.gz", hash = "sha256:0ec40d9fd4ec9f9e3ff9bdd12dbd3535f4085949f4db93025089d7a673ea94e8"}, + {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] zipp = [ - {file = "zipp-3.0.0-py3-none-any.whl", hash = "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2"}, - {file = "zipp-3.0.0.tar.gz", hash = "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a"}, + {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, + {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, ] diff --git a/python/pyproject.toml b/python/pyproject.toml index 67eee5630d8..851feac4cbb 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "0.5.0" +version = "0.6.3" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] classifiers=[ @@ -20,9 +20,10 @@ license = "MIT-0" [tool.poetry.dependencies] python = "^3.6" aws-xray-sdk = "^2.4.3" -coverage = {extras = 
["toml"], version = "^5.0.3"} +fastjsonschema = "^2.14.4" [tool.poetry.dev-dependencies] +coverage = {extras = ["toml"], version = "^5.0.3"} pytest = "^5.2" black = "^19.10b0" flake8 = "^3.7.9" @@ -39,6 +40,7 @@ isort = "^4.3.21" pre-commit = "^2.1.0" pytest-cov = "^2.8.1" pytest-mock = "^2.0.0" +pdoc3 = "^0.7.5" [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/python/setup.py b/python/setup.py deleted file mode 100644 index 10d368abe1b..00000000000 --- a/python/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -"""The setup script.""" - -from setuptools import find_packages, setup - -with open("README.md") as readme_file: - readme = readme_file.read() - -with open("HISTORY.md") as history_file: - history = history_file.read() - - -requirements = ["aws-xray-sdk~=2.4"] # noqa: E501 - -setup_requirements = ["pytest-runner"] - -test_requirements = ["pytest"] - -setup( - author="Amazon Web Services", - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - ], - description="Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric", - install_requires=requirements, - license="MIT-0", - long_description=readme + "\n\n" + history, - long_description_content_type="text/markdown", - include_package_data=True, - keywords="aws_lambda_powertools", - name="aws_lambda_powertools", - packages=find_packages(), - setup_requires=setup_requirements, - test_suite="tests", - tests_require=test_requirements, - version="0.5.0", - url="https://github.com/awslabs/aws-lambda-powertools/", - zip_safe=False, -) diff --git a/python/tests/functional/test_metrics.py b/python/tests/functional/test_metrics.py new file mode 100644 index 
00000000000..1bd20d8cec3 --- /dev/null +++ b/python/tests/functional/test_metrics.py @@ -0,0 +1,264 @@ +import json +from typing import Dict, List + +import pytest + +from aws_lambda_powertools.metrics import ( + Metrics, + MetricUnit, + MetricUnitError, + MetricValueError, + SchemaValidationError, + UniqueNamespaceError, + single_metric, +) +from aws_lambda_powertools.metrics.base import MetricManager + + +@pytest.fixture +def metric() -> Dict[str, str]: + return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} + + +@pytest.fixture +def metrics() -> List[Dict[str, str]]: + return [ + {"name": "metric_one", "unit": MetricUnit.Count, "value": 1}, + {"name": "metric_two", "unit": MetricUnit.Count, "value": 1}, + ] + + +@pytest.fixture +def dimension() -> Dict[str, str]: + return {"name": "test_dimension", "value": "test"} + + +@pytest.fixture +def dimensions() -> List[Dict[str, str]]: + return [ + {"name": "test_dimension", "value": "test"}, + {"name": "test_dimension_2", "value": "test"}, + ] + + +@pytest.fixture +def namespace() -> Dict[str, str]: + return {"name": "test_namespace"} + + +@pytest.fixture +def a_hundred_metrics() -> List[Dict[str, str]]: + metrics = [] + for i in range(100): + metrics.append({"name": f"metric_{i}", "unit": "Count", "value": 1}) + + return metrics + + +def serialize_metrics(metrics: List[Dict], dimensions: List[Dict], namespace: Dict) -> Dict: + """ Helper function to build EMF object from a list of metrics, dimensions """ + my_metrics = MetricManager() + for metric in metrics: + my_metrics.add_metric(**metric) + + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + + my_metrics.add_namespace(**namespace) + return my_metrics.serialize_metric_set() + + +def serialize_single_metric(metric: Dict, dimension: Dict, namespace: Dict) -> Dict: + """ Helper function to build EMF object from a given metric, dimension and namespace """ + my_metrics = MetricManager() + my_metrics.add_metric(**metric) + 
my_metrics.add_dimension(**dimension) + my_metrics.add_namespace(**namespace) + return my_metrics.serialize_metric_set() + + +def remove_timestamp(metrics: List): + """ Helper function to remove Timestamp key from EMF objects as they're built at serialization """ + for metric in metrics: + del metric["_aws"]["Timestamp"] + + +def test_single_metric(capsys, metric, dimension, namespace): + with single_metric(**metric) as my_metrics: + my_metrics.add_dimension(**dimension) + my_metrics.add_namespace(**namespace) + + output = json.loads(capsys.readouterr().out.strip()) + expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_single_metric_one_metric_only(capsys, metric, dimension, namespace): + with single_metric(**metric) as my_metrics: + my_metrics.add_metric(name="second_metric", unit="Count", value=1) + my_metrics.add_metric(name="third_metric", unit="Seconds", value=1) + my_metrics.add_dimension(**dimension) + my_metrics.add_namespace(**namespace) + + output = json.loads(capsys.readouterr().out.strip()) + expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_multiple_metrics(metrics, dimensions, namespace): + my_metrics = Metrics() + for metric in metrics: + my_metrics.add_metric(**metric) + + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + + my_metrics.add_namespace(**namespace) + output = my_metrics.serialize_metric_set() + expected = serialize_metrics(metrics=metrics, dimensions=dimensions, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_multiple_namespaces(metric, 
dimension, namespace): + namespace_a = {"name": "OtherNamespace"} + namespace_b = {"name": "AnotherNamespace"} + + with pytest.raises(UniqueNamespaceError): + with single_metric(**metric) as m: + m.add_dimension(**dimension) + m.add_namespace(**namespace) + m.add_namespace(**namespace_a) + m.add_namespace(**namespace_b) + + +def test_log_metrics(capsys, metrics, dimensions, namespace): + my_metrics = Metrics() + my_metrics.add_namespace(**namespace) + for metric in metrics: + my_metrics.add_metric(**metric) + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + + @my_metrics.log_metrics + def lambda_handler(evt, handler): + return True + + lambda_handler({}, {}) + output = json.loads(capsys.readouterr().out.strip()) + expected = serialize_metrics(metrics=metrics, dimensions=dimensions, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_namespace_env_var(monkeypatch, capsys, metric, dimension, namespace): + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", namespace["name"]) + + with single_metric(**metric) as my_metrics: + my_metrics.add_dimension(**dimension) + monkeypatch.delenv("POWERTOOLS_METRICS_NAMESPACE") + + output = json.loads(capsys.readouterr().out.strip()) + expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_metrics_spillover(capsys, metric, dimension, namespace, a_hundred_metrics): + my_metrics = Metrics() + my_metrics.add_namespace(**namespace) + my_metrics.add_dimension(**dimension) + + for _metric in a_hundred_metrics: + my_metrics.add_metric(**_metric) + + @my_metrics.log_metrics + def lambda_handler(evt, handler): + my_metrics.add_metric(**metric) + return True + + lambda_handler({}, {}) + + output = capsys.readouterr().out.strip() + 
spillover_metrics, single_metric = output.split("\n") + spillover_metrics = json.loads(spillover_metrics) + single_metric = json.loads(single_metric) + + expected_single_metric = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + expected_spillover_metrics = serialize_metrics( + metrics=a_hundred_metrics, dimensions=[dimension], namespace=namespace + ) + + remove_timestamp(metrics=[spillover_metrics, expected_spillover_metrics, single_metric, expected_single_metric]) + + assert single_metric["_aws"] == expected_single_metric["_aws"] + assert spillover_metrics["_aws"] == expected_spillover_metrics["_aws"] + + +def test_log_metrics_schema_error(metrics, dimensions, namespace): + # It should error out because by default log_metrics doesn't invoke a function + # so when decorator runs it'll raise an error while trying to serialize metrics + my_metrics = Metrics() + + @my_metrics.log_metrics + def lambda_handler(evt, handler): + my_metrics.add_namespace(namespace) + for metric in metrics: + my_metrics.add_metric(**metric) + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + return True + + with pytest.raises(SchemaValidationError): + lambda_handler({}, {}) + + +def test_incorrect_metric_unit(metric, dimension, namespace): + metric["unit"] = "incorrect_unit" + + with pytest.raises(MetricUnitError): + with single_metric(**metric) as m: + m.add_dimension(**dimension) + m.add_namespace(**namespace) + + +def test_schema_no_namespace(metric, dimension): + with pytest.raises(SchemaValidationError): + with single_metric(**metric) as m: + m.add_dimension(**dimension) + + +def test_schema_incorrect_value(metric, dimension, namespace): + metric["value"] = "some_value" + with pytest.raises(MetricValueError): + with single_metric(**metric) as m: + m.add_dimension(**dimension) + m.add_namespace(**namespace) + + +def test_schema_no_metrics(dimensions, namespace): + my_metrics = Metrics() + my_metrics.add_namespace(**namespace) + for 
dimension in dimensions: + my_metrics.add_dimension(**dimension) + with pytest.raises(SchemaValidationError): + my_metrics.serialize_metric_set() + + +def test_exceed_number_of_dimensions(metric, namespace): + dimensions = [] + for i in range(11): + dimensions.append({"name": f"test_{i}", "value": "test"}) + + with pytest.raises(SchemaValidationError): + with single_metric(**metric) as m: + m.add_namespace(**namespace) + for dimension in dimensions: + m.add_dimension(**dimension)