diff --git a/aws_lambda_powertools/utilities/parser/__init__.py b/aws_lambda_powertools/utilities/parser/__init__.py
index ad19168bb29..29127a3035b 100644
--- a/aws_lambda_powertools/utilities/parser/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/__init__.py
@@ -1,10 +1,11 @@
 """Advanced event_parser utility
 """
-from . import envelopes
-from .envelopes import BaseEnvelope
-from .parser import event_parser, parse
-from .pydantic import BaseModel, Field, ValidationError, root_validator, validator
+from pydantic import BaseModel, Field, ValidationError, field_validator, model_validator
+
+from aws_lambda_powertools.utilities.parser import envelopes
+from aws_lambda_powertools.utilities.parser.envelopes import BaseEnvelope
+from aws_lambda_powertools.utilities.parser.parser import event_parser, parse
 
 __all__ = [
     "event_parser",
     "parse",
@@ -13,7 +14,7 @@
     "BaseEnvelope",
     "BaseModel",
     "Field",
-    "validator",
-    "root_validator",
+    "field_validator",
+    "model_validator",
     "ValidationError",
 ]
diff --git a/aws_lambda_powertools/utilities/parser/compat.py b/aws_lambda_powertools/utilities/parser/compat.py
deleted file mode 100644
index c76bc6546a5..00000000000
--- a/aws_lambda_powertools/utilities/parser/compat.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import functools
-
-
-@functools.lru_cache(maxsize=None)
-def disable_pydantic_v2_warning():
-    """
-    Disables the Pydantic version 2 warning by filtering out the related warnings.
-
-    This function checks the version of Pydantic currently installed and if it is version 2,
-    it filters out the PydanticDeprecationWarning and PydanticDeprecatedSince20 warnings
-    to suppress them.
-
-    Since we only need to run the code once, we are using lru_cache to improve performance.
-
-    Note: This function assumes that Pydantic is installed.
-
-    Usage:
-        disable_pydantic_v2_warning()
-    """
-    try:
-        from pydantic import __version__
-
-        version = __version__.split(".")
-
-        if int(version[0]) == 2:  # pragma: no cover # dropping in v3
-            import warnings
-
-            from pydantic import PydanticDeprecatedSince20, PydanticDeprecationWarning
-
-            warnings.filterwarnings("ignore", category=PydanticDeprecationWarning)
-            warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
-
-    except ImportError:  # pragma: no cover # false positive; dropping in v3
-        pass
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/base.py b/aws_lambda_powertools/utilities/parser/envelopes/base.py
index 4fe2b80ea40..eefdbb7f042 100644
--- a/aws_lambda_powertools/utilities/parser/envelopes/base.py
+++ b/aws_lambda_powertools/utilities/parser/envelopes/base.py
@@ -1,8 +1,11 @@
+from __future__ import annotations
+
 import logging
 from abc import ABC, abstractmethod
-from typing import Any, Dict, Optional, Type, TypeVar, Union
+from typing import Any, Dict, Optional, TypeVar, Union
 
-from aws_lambda_powertools.utilities.parser.types import Model
+from aws_lambda_powertools.utilities.parser.functions import _retrieve_or_set_model_from_cache
+from aws_lambda_powertools.utilities.parser.types import T
 
 logger = logging.getLogger(__name__)
 
@@ -11,14 +14,14 @@ class BaseEnvelope(ABC):
     """ABC implementation for creating a supported Envelope"""
 
     @staticmethod
-    def _parse(data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Union[Model, None]:
+    def _parse(data: Optional[Union[Dict[str, Any], Any]], model: type[T]) -> Union[T, None]:
         """Parses envelope data against model provided
 
         Parameters
         ----------
         data : Dict
             Data to be parsed and validated
-        model : Type[Model]
+        model : type[T]
             Data model to parse and validate data against
 
         Returns
@@ -30,15 +33,17 @@ def _parse(data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]) -> Un
             logger.debug("Skipping parsing as event is None")
             return data
 
+        adapter = _retrieve_or_set_model_from_cache(model=model)
+
         logger.debug("parsing event against model")
         if isinstance(data, str):
             logger.debug("parsing event as string")
-            return model.model_validate_json(data)
+            return adapter.validate_json(data)
 
-        return model.model_validate(data)
+        return adapter.validate_python(data)
 
     @abstractmethod
-    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Type[Model]):
+    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: type[T]):
         """Implementation to parse data against envelope model, then against the data model
 
         NOTE: Call `_parse` method to fully parse data with model provided.
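The envelope's `_parse` now routes through a cached Pydantic v2 `TypeAdapter` (introduced in `functions.py` below). As a minimal sketch of the `TypeAdapter` API this change relies on, where the `Order` model is illustrative and not part of the diff:

```python
from pydantic import BaseModel, TypeAdapter


class Order(BaseModel):  # illustrative model, not part of this change
    id: int
    description: str


adapter = TypeAdapter(Order)

# validate_json parses a raw JSON string; validate_python takes decoded data
order = adapter.validate_json('{"id": 1, "description": "book"}')
same_order = adapter.validate_python({"id": 1, "description": "book"})
assert order == same_order
```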
diff --git a/aws_lambda_powertools/utilities/parser/functions.py b/aws_lambda_powertools/utilities/parser/functions.py
new file mode 100644
index 00000000000..696437a6550
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/functions.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from pydantic import TypeAdapter
+
+from aws_lambda_powertools.shared.cache_dict import LRUDict
+from aws_lambda_powertools.utilities.parser.types import T
+
+CACHE_TYPE_ADAPTER = LRUDict(max_items=1024)
+
+
+def _retrieve_or_set_model_from_cache(model: type[T]) -> TypeAdapter:
+    """
+    Retrieves or sets a TypeAdapter instance from the cache for the given model.
+
+    If the model is already present in the cache, the corresponding TypeAdapter
+    instance is returned. Otherwise, a new TypeAdapter instance is created,
+    stored in the cache, and returned.
+
+    Parameters
+    ----------
+    model: type[T]
+        The model type for which the TypeAdapter instance should be retrieved or set.
+
+    Returns
+    -------
+    TypeAdapter
+        The TypeAdapter instance for the given model,
+        either retrieved from the cache or newly created and stored in the cache.
+    """
+    id_model = id(model)
+
+    if id_model in CACHE_TYPE_ADAPTER:
+        return CACHE_TYPE_ADAPTER[id_model]
+
+    CACHE_TYPE_ADAPTER[id_model] = TypeAdapter(model)
+    return CACHE_TYPE_ADAPTER[id_model]
diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py
index 117d9500172..26a6c439704 100644
--- a/aws_lambda_powertools/utilities/parser/parser.py
+++ b/aws_lambda_powertools/utilities/parser/parser.py
@@ -1,11 +1,16 @@
+from __future__ import annotations
+
 import logging
 import typing
 from typing import Any, Callable, Dict, Optional, Type, overload
 
+from pydantic import PydanticSchemaGenerationError, ValidationError
+
 from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
 from aws_lambda_powertools.utilities.parser.envelopes.base import Envelope
 from aws_lambda_powertools.utilities.parser.exceptions import InvalidEnvelopeError, InvalidModelTypeError
-from aws_lambda_powertools.utilities.parser.types import EventParserReturnType, Model
+from aws_lambda_powertools.utilities.parser.functions import _retrieve_or_set_model_from_cache
+from aws_lambda_powertools.utilities.parser.types import EventParserReturnType, T
 from aws_lambda_powertools.utilities.typing import LambdaContext
 
 logger = logging.getLogger(__name__)
@@ -16,7 +21,7 @@ def event_parser(
     handler: Callable[..., EventParserReturnType],
     event: Dict[str, Any],
     context: LambdaContext,
-    model: Optional[Type[Model]] = None,
+    model: Optional[type[T]] = None,
     envelope: Optional[Type[Envelope]] = None,
     **kwargs: Any,
 ) -> EventParserReturnType:
@@ -32,7 +37,7 @@ def event_parser(
     This is useful when you need to confirm event wrapper structure, and b) selectively
     extract a portion of your payload for parsing & validation.
 
-    NOTE: If envelope is omitted, the complete event is parsed to match the model parameter BaseModel definition.
+    NOTE: If envelope is omitted, the complete event is parsed to match the model parameter definition.
 
     Example
     -------
@@ -66,7 +71,7 @@ def handler(event: Order, context: LambdaContext):
         Lambda event to be parsed & validated
     context: LambdaContext
         Lambda context object
-    model: Model
+    model: Optional[type[T]]
         Your data model that will replace the event.
     envelope: Envelope
         Optional envelope to extract the model from
@@ -93,24 +98,27 @@ def handler(event: Order, context: LambdaContext):
             "or as the type hint of `event` in the handler that it wraps",
         )
 
-    if envelope:
-        parsed_event = parse(event=event, model=model, envelope=envelope)
-    else:
-        parsed_event = parse(event=event, model=model)
+    try:
+        if envelope:
+            parsed_event = parse(event=event, model=model, envelope=envelope)
+        else:
+            parsed_event = parse(event=event, model=model)
 
-    logger.debug(f"Calling handler {handler.__name__}")
-    return handler(parsed_event, context, **kwargs)
+        logger.debug(f"Calling handler {handler.__name__}")
+        return handler(parsed_event, context, **kwargs)
+    except (ValidationError, AttributeError) as exc:
+        raise InvalidModelTypeError(f"Error: {str(exc)}. Please ensure the type you're trying to parse into is correct")
 
 
 @overload
-def parse(event: Dict[str, Any], model: Type[Model]) -> Model: ...  # pragma: no cover
+def parse(event: Dict[str, Any], model: type[T]) -> T: ...  # pragma: no cover
 
 
 @overload
-def parse(event: Dict[str, Any], model: Type[Model], envelope: Type[Envelope]) -> Model: ...  # pragma: no cover
+def parse(event: Dict[str, Any], model: type[T], envelope: Type[Envelope]) -> T: ...  # pragma: no cover
 
 
-def parse(event: Dict[str, Any], model: Type[Model], envelope: Optional[Type[Envelope]] = None):
+def parse(event: Dict[str, Any], model: type[T], envelope: Optional[Type[Envelope]] = None):
     """Standalone function to parse & validate events using Pydantic models
 
     Typically used when you need fine-grained control over error handling compared to event_parser decorator.
@@ -176,12 +184,20 @@ def handler(event: Order, context: LambdaContext):
         ) from exc
 
     try:
+        adapter = _retrieve_or_set_model_from_cache(model=model)
+
         logger.debug("Parsing and validating event model; no envelope used")
         if isinstance(event, str):
-            return model.model_validate_json(event)
+            return adapter.validate_json(event)
+
+        return adapter.validate_python(event)
 
-        return model.model_validate(event)
-    except AttributeError as exc:
+    # Pydantic raises PydanticSchemaGenerationError when the model is not a Pydantic model
+    # This is seen in the tests where we pass a non-Pydantic model type to the parser or
+    # when we pass a data structure that does not match the model (trying to parse a true/false/etc into a model)
+    except PydanticSchemaGenerationError as exc:
+        raise InvalidModelTypeError(f"The event supplied is unable to be validated into {type(model)}") from exc
+    except ValidationError as exc:
         raise InvalidModelTypeError(
             f"Error: {str(exc)}. Please ensure the Input model inherits from BaseModel,\n"
             "and your payload adheres to the specified Input model structure.\n"
diff --git a/aws_lambda_powertools/utilities/parser/pydantic.py b/aws_lambda_powertools/utilities/parser/pydantic.py
deleted file mode 100644
index 3d8eb2da4e1..00000000000
--- a/aws_lambda_powertools/utilities/parser/pydantic.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Pydantic has many utilities that some advanced customers typically use.
-# Importing what's currently in the docs would likely miss something.
-# As Pydantic export new types, new utilities, we will have to keep up
-# with a project that's not used in our core functionalities.
-# For this reason, we're relying on Pydantic's __all__ attr to allow customers
-# to use `from aws_lambda_powertools.utilities.parser.pydantic import `
-
-from pydantic import *  # noqa: F403,F401
-from pydantic.errors import *  # noqa: F403,F401
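With the re-export module removed, Pydantic helpers are imported from `pydantic` directly, using the v2 validator names now exported by `__init__.py` above. A hedged migration sketch, where the model and validators are illustrative:

```python
from pydantic import BaseModel, field_validator, model_validator


class Order(BaseModel):  # illustrative model, not part of this change
    id: int
    description: str

    # Pydantic v1's @validator("id", pre=True) becomes:
    @field_validator("id", mode="before")
    def coerce_id(cls, value):
        return int(value)

    # Pydantic v1's @root_validator becomes:
    @model_validator(mode="after")
    def check_consistency(self):
        # runs on the fully constructed model instance
        return self
```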
diff --git a/aws_lambda_powertools/utilities/parser/types.py b/aws_lambda_powertools/utilities/parser/types.py
index 5282ccee373..e7654e3acc2 100644
--- a/aws_lambda_powertools/utilities/parser/types.py
+++ b/aws_lambda_powertools/utilities/parser/types.py
@@ -11,5 +11,6 @@
 EventParserReturnType = TypeVar("EventParserReturnType")
 AnyInheritedModel = Union[Type[BaseModel], BaseModel]
 RawDictOrModel = Union[Dict[str, Any], AnyInheritedModel]
+T = TypeVar("T")
 
 __all__ = ["Json", "Literal"]
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md
index 2846652cc8d..b1f03cec1b7 100644
--- a/docs/utilities/parser.md
+++ b/docs/utilities/parser.md
@@ -11,27 +11,13 @@ This utility provides data parsing and deep validation using [Pydantic](https://
 * Defines data in pure Python classes, then parse, validate and extract only what you want
 * Built-in envelopes to unwrap, extend, and validate popular event sources payloads
 * Enforces type hints at runtime with user-friendly errors
-* Support for Pydantic v1 and v2
+* Support for Pydantic v2
 
 ## Getting started
 
 ### Install
 
-Powertools for AWS Lambda (Python) supports Pydantic v1 and v2. Each Pydantic version requires different dependencies before you can use Parser.
-
-#### Using Pydantic v1
-
-!!! info "This is not necessary if you're installing Powertools for AWS Lambda (Python) via [Lambda Layer/SAR](../index.md#lambda-layer){target="_blank"}"
-
-Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g._, _requirements.txt_, _pyproject.toml_.
-
-???+ warning
-    This will increase the compressed package size by >10MB due to the Pydantic dependency.
-
-    To reduce the impact on the package size at the expense of 30%-50% of its performance [Pydantic can also be
-    installed without binary files](https://pydantic-docs.helpmanual.io/install/#performance-vs-package-size-trade-off){target="_blank" rel="nofollow"}:
-
-    Pip example: `SKIP_CYTHON=1 pip install --no-binary pydantic aws-lambda-powertools[parser]`
+Powertools for AWS Lambda (Python) supports Pydantic v2.
 
 #### Using Pydantic v2
 
@@ -169,6 +155,14 @@ def my_function():
 }
 ```
 
+#### Primitive data model parsing
+
+The parser allows you to parse events into primitive data types, such as `dict` or classes that don't inherit from `BaseModel`. The following example shows you how to parse a [`Union`](https://docs.pydantic.dev/latest/api/standard_library_types/#union):
+
+```python
+--8<-- "examples/parser/src/multiple_model_parsing.py"
+```
+
 ### Built-in models
 
 Parser comes with the following built-in models:
diff --git a/examples/batch_processing/src/pydantic_dynamodb.py b/examples/batch_processing/src/pydantic_dynamodb.py
index dbd5cff24c4..4c4270ca472 100644
--- a/examples/batch_processing/src/pydantic_dynamodb.py
+++ b/examples/batch_processing/src/pydantic_dynamodb.py
@@ -9,7 +9,7 @@
     EventType,
     process_partial_response,
 )
-from aws_lambda_powertools.utilities.parser import BaseModel, validator
+from aws_lambda_powertools.utilities.parser import BaseModel, field_validator
 from aws_lambda_powertools.utilities.parser.models import (
     DynamoDBStreamChangedRecordModel,
     DynamoDBStreamRecordModel,
@@ -26,7 +26,7 @@ class OrderDynamoDB(BaseModel):
 
     # auto transform json string
     # so Pydantic can auto-initialize nested Order model
-    @validator("Message", pre=True)
+    @field_validator("Message", mode="before")
    def transform_message_to_dict(cls, value: Dict[Literal["S"], str]):
         return json.loads(value["S"])
 
diff --git a/examples/parser/src/multiple_model_parsing.py b/examples/parser/src/multiple_model_parsing.py
new file mode 100644
index 00000000000..adbde35e4d0
--- /dev/null
+++ b/examples/parser/src/multiple_model_parsing.py
@@ -0,0 +1,33 @@
+from typing import Any, Literal, Union
+
+from pydantic import BaseModel, Field
+
+from aws_lambda_powertools.shared.types import Annotated
+from aws_lambda_powertools.utilities.parser import event_parser
+
+
+class Cat(BaseModel):
+    animal: Literal["cat"]
+    name: str
+    meow: int
+
+
+class Dog(BaseModel):
+    animal: Literal["dog"]
+    name: str
+    bark: int
+
+
+Animal = Annotated[
+    Union[Cat, Dog],
+    Field(discriminator="animal"),
+]
+
+
+@event_parser(model=Animal)
+def lambda_handler(event: Animal, _: Any) -> str:
+    if isinstance(event, Cat):
+        # we have a cat!
+        return f"🐈: {event.name}"
+
+    return f"🐶: {event.name}"
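A quick local invocation of the example above might look like this; the events are illustrative, and the decorator parses them into `Cat` or `Dog` via the `animal` discriminator:

```python
cat_event = {"animal": "cat", "name": "Whiskers", "meow": 3}
dog_event = {"animal": "dog", "name": "Rex", "bark": 1}

# the discriminated union picks the right model before the handler body runs
assert lambda_handler(cat_event, None) == "🐈: Whiskers"
assert lambda_handler(dog_event, None) == "🐶: Rex"
```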
+ return f"🐈: {event.name}" + + return f"🐶: {event.name}" diff --git a/tests/e2e/parser/__init__.py b/tests/e2e/parser/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/parser/conftest.py b/tests/e2e/parser/conftest.py new file mode 100644 index 00000000000..d7ef0aa0176 --- /dev/null +++ b/tests/e2e/parser/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from tests.e2e.parser.infrastructure import ParserStack + + +@pytest.fixture(autouse=True, scope="package") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = ParserStack() + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/parser/handlers/handler_with_basic_model.py b/tests/e2e/parser/handlers/handler_with_basic_model.py new file mode 100644 index 00000000000..7b0d89dda53 --- /dev/null +++ b/tests/e2e/parser/handlers/handler_with_basic_model.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel + +from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class BasicModel(BaseModel): + product: str + version: str + + +@event_parser +def lambda_handler(event: BasicModel, context: LambdaContext): + return {"product": event.product} diff --git a/tests/e2e/parser/handlers/handler_with_dataclass.py b/tests/e2e/parser/handlers/handler_with_dataclass.py new file mode 100644 index 00000000000..7f465fe79ec --- /dev/null +++ b/tests/e2e/parser/handlers/handler_with_dataclass.py @@ -0,0 +1,15 @@ +from dataclasses import dataclass + +from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@dataclass +class BasicDataclass: + product: str + version: str + + +@event_parser +def lambda_handler(event: BasicDataclass, context: LambdaContext): + return {"product": event.product} diff --git a/tests/e2e/parser/handlers/handler_with_union_tag.py b/tests/e2e/parser/handlers/handler_with_union_tag.py new file mode 100644 index 00000000000..d822dd99a27 --- /dev/null +++ b/tests/e2e/parser/handlers/handler_with_union_tag.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from typing import Annotated, Literal, Union + +from pydantic import BaseModel, Field + +from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class SuccessCallback(BaseModel): + order_id: str + status: Literal["success"] + error_msg: str + + +class ErrorCallback(BaseModel): + status: Literal["error"] + error_msg: str + + +class PartialFailureCallback(BaseModel): + status: Literal["partial"] + error_msg: str + + +OrderCallback = Annotated[Union[SuccessCallback, ErrorCallback, PartialFailureCallback], Field(discriminator="status")] + + +@event_parser +def lambda_handler(event: OrderCallback, context: LambdaContext): + return {"error_msg": event.error_msg} diff --git a/tests/e2e/parser/infrastructure.py b/tests/e2e/parser/infrastructure.py new file mode 100644 index 00000000000..5d66905e7c7 --- /dev/null +++ b/tests/e2e/parser/infrastructure.py @@ -0,0 +1,6 @@ +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class ParserStack(BaseInfrastructure): + def create_resources(self): + self.create_lambda_functions() diff --git a/tests/e2e/parser/test_parser.py b/tests/e2e/parser/test_parser.py new file mode 100644 index 00000000000..ae0b75b344c --- 
diff --git a/tests/e2e/parser/test_parser.py b/tests/e2e/parser/test_parser.py
new file mode 100644
index 00000000000..ae0b75b344c
--- /dev/null
+++ b/tests/e2e/parser/test_parser.py
@@ -0,0 +1,68 @@
+import json
+
+import pytest
+
+from tests.e2e.utils import data_fetcher
+
+
+@pytest.fixture
+def handler_with_basic_model_arn(infrastructure: dict) -> str:
+    return infrastructure.get("HandlerWithBasicModelArn", "")
+
+
+@pytest.fixture
+def handler_with_union_tag_arn(infrastructure: dict) -> str:
+    return infrastructure.get("HandlerWithUnionTagArn", "")
+
+
+@pytest.fixture
+def handler_with_dataclass_arn(infrastructure: dict) -> str:
+    return infrastructure.get("HandlerWithDataclass", "")
+
+
+@pytest.mark.xdist_group(name="parser")
+def test_parser_with_basic_model(handler_with_basic_model_arn):
+    # GIVEN
+    payload = json.dumps({"product": "powertools", "version": "v3"})
+
+    # WHEN
+    parser_execution, _ = data_fetcher.get_lambda_response(
+        lambda_arn=handler_with_basic_model_arn,
+        payload=payload,
+    )
+
+    ret = parser_execution["Payload"].read().decode("utf-8")
+
+    assert "powertools" in ret
+
+
+@pytest.mark.xdist_group(name="parser")
+def test_parser_with_union_tag(handler_with_union_tag_arn):
+    # GIVEN
+    payload = json.dumps({"status": "partial", "error_msg": "partial failure"})
+
+    # WHEN
+    parser_execution, _ = data_fetcher.get_lambda_response(
+        lambda_arn=handler_with_union_tag_arn,
+        payload=payload,
+    )
+
+    ret = parser_execution["Payload"].read().decode("utf-8")
+
+    assert "partial failure" in ret
+
+
+@pytest.mark.xdist_group(name="parser")
+def test_parser_with_dataclass(handler_with_dataclass_arn):
+    # GIVEN
+    payload = json.dumps({"product": "powertools", "version": "v3"})
+
+    # WHEN
+    parser_execution, _ = data_fetcher.get_lambda_response(
+        lambda_arn=handler_with_dataclass_arn,
+        payload=payload,
+    )
+
+    ret = parser_execution["Payload"].read().decode("utf-8")
+
+    assert "powertools" in ret
diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py
index f265de14590..fdcfffe0c38 100644
--- a/tests/functional/parser/test_parser.py
+++ b/tests/functional/parser/test_parser.py
@@ -1,11 +1,11 @@
 import json
-from typing import Dict, Union
+from typing import Any, Dict, Literal, Union
 
 import pydantic
 import pytest
 
+from aws_lambda_powertools.shared.types import Annotated
 from aws_lambda_powertools.utilities.parser import (
-    ValidationError,
     event_parser,
     exceptions,
 )
@@ -18,7 +18,7 @@ def test_parser_unsupported_event(dummy_schema, invalid_value):
     def handle_no_envelope(event: Dict, _: LambdaContext):
         return event
 
-    with pytest.raises(ValidationError):
+    with pytest.raises(exceptions.InvalidModelTypeError):
         handle_no_envelope(event=invalid_value, context=LambdaContext())
 
 
@@ -75,7 +75,7 @@ def validate_field(cls, value):
     assert event_parsed.version == int(event_raw["version"])
 
 
-@pytest.mark.parametrize("invalid_schema", [None, str, bool(), [], (), object])
+@pytest.mark.parametrize("invalid_schema", [str, bool(), [], ()])
 def test_parser_with_invalid_schema_type(dummy_event, invalid_schema):
     @event_parser(model=invalid_schema)
     def handle_no_envelope(event: Dict, _: LambdaContext):
@@ -118,3 +118,36 @@ def handler(evt: dummy_schema, _: LambdaContext):
         assert evt.message == "hello world"
 
     handler(dummy_event["payload"], LambdaContext())
+
+
+@pytest.mark.parametrize(
+    "test_input,expected",
+    [
+        (
+            {"status": "succeeded", "name": "Clifford", "breed": "Labrador"},
+            "Successfully retrieved Labrador named Clifford",
+        ),
+        ({"status": "failed", "error": "oh some error"}, "Uh oh. Had a problem: oh some error"),
+    ],
+)
+def test_parser_unions(test_input, expected):
+    class SuccessfulCallback(pydantic.BaseModel):
+        status: Literal["succeeded"]
+        name: str
+        breed: Literal["Newfoundland", "Labrador"]
+
+    class FailedCallback(pydantic.BaseModel):
+        status: Literal["failed"]
+        error: str
+
+    DogCallback = Annotated[Union[SuccessfulCallback, FailedCallback], pydantic.Field(discriminator="status")]
+
+    @event_parser(model=DogCallback)
+    def handler(event: test_input, _: Any) -> str:
+        if isinstance(event, FailedCallback):
+            return f"Uh oh. Had a problem: {event.error}"
+
+        return f"Successfully retrieved {event.breed} named {event.name}"
+
+    ret = handler(test_input, None)
+    assert ret == expected
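As the updated functional test above shows, validation failures now surface as `InvalidModelTypeError` rather than a raw `ValidationError`. A hedged sketch of handling this with the standalone `parse`, using an illustrative model:

```python
from pydantic import BaseModel

from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.exceptions import InvalidModelTypeError


class Order(BaseModel):  # illustrative model
    id: int


# per the new parser.py, ValidationError is wrapped in InvalidModelTypeError
try:
    parse(event={"id": "not-a-number"}, model=Order)
except InvalidModelTypeError as exc:
    print(f"Parsing failed: {exc}")
```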
diff --git a/tests/performance/parser/__init__.py b/tests/performance/parser/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/performance/parser/test_parser_performance.py b/tests/performance/parser/test_parser_performance.py
new file mode 100644
index 00000000000..724368dbe2a
--- /dev/null
+++ b/tests/performance/parser/test_parser_performance.py
@@ -0,0 +1,71 @@
+import time
+from contextlib import contextmanager
+from typing import Generator
+
+import pytest
+from pydantic import BaseModel, Field
+
+from aws_lambda_powertools.shared.types import Annotated, Literal, Union
+from aws_lambda_powertools.utilities.parser import parse
+
+# adjusted for slower machines in CI too
+PARSER_VALIDATION_SLA: float = 0.005
+
+
+@contextmanager
+def timing() -> Generator:
+    """Generator to quickly time operations. It can add ~5ms, so take that into account in elapsed time
+
+    Examples
+    --------
+
+        with timing() as t:
+            print("something")
+        elapsed = t()
+    """
+    start = time.perf_counter()
+    yield lambda: time.perf_counter() - start  # gen as lambda to calculate elapsed time
+
+
+class SuccessfulCallback(BaseModel):
+    status: Literal["succeeded"]
+    name: str
+    breed: Literal["Husky", "Labrador"]
+
+
+class FailedCallback(BaseModel):
+    status: Literal["failed"]
+    error: str
+
+
+class TemporaryErrorCallback(BaseModel):
+    status: Literal["temporary_error"]
+    error: str
+
+
+class PartialSuccessCallback(BaseModel):
+    status: Literal["partial_success"]
+    name: str
+    breed: Literal["Husky", "Labrador"]
+
+
+DogCallback = Annotated[
+    Union[SuccessfulCallback, FailedCallback, PartialSuccessCallback, TemporaryErrorCallback],
+    Field(discriminator="status"),
+]
+
+
+@pytest.mark.perf
+@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False)
+def test_parser_with_cache():
+    event = {"status": "temporary_error", "error": "X"}
+
+    # WHEN we call parser 999 times
+    with timing() as t:
+        for _ in range(999):
+            parse(event=event, model=DogCallback)
+
+    # THEN completion time should be below our validation SLA
+    elapsed = t()
+    if elapsed > PARSER_VALIDATION_SLA:
+        pytest.fail(f"Parser validation should be below {PARSER_VALIDATION_SLA}s: {elapsed}")
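The SLA above is feasible because only the first `parse` call pays `TypeAdapter` construction; later calls hit the `LRUDict` cache keyed by `id(model)`. A sketch of the warm-up behaviour, reusing the `DogCallback` union defined in the test above:

```python
event = {"status": "failed", "error": "X"}

parse(event=event, model=DogCallback)  # first call: builds and caches the TypeAdapter
parse(event=event, model=DogCallback)  # subsequent calls: cache hit, validation only
```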
diff --git a/tests/performance/test_high_level_imports.py b/tests/performance/test_high_level_imports.py
index 7639065dd83..c1250ab690a 100644
--- a/tests/performance/test_high_level_imports.py
+++ b/tests/performance/test_high_level_imports.py
@@ -7,11 +7,13 @@
 LOGGER_INIT_SLA: float = 0.005
 METRICS_INIT_SLA: float = 0.005
 TRACER_INIT_SLA: float = 0.5
+PARSER_INIT_SLA: float = 0.05
 IMPORT_INIT_SLA: float = 0.035
 PARENT_PACKAGE = "aws_lambda_powertools"
 TRACING_PACKAGE = "aws_lambda_powertools.tracing"
 LOGGING_PACKAGE = "aws_lambda_powertools.logging"
 METRICS_PACKAGE = "aws_lambda_powertools.metrics"
+PARSER_PACKAGE = "aws_lambda_powertools.utilities.parser"
 
 
 def import_core_utilities() -> Tuple[ModuleType, ModuleType, ModuleType]:
@@ -20,6 +22,7 @@ def import_core_utilities() -> Tuple[ModuleType, ModuleType, ModuleType]:
         importlib.import_module(TRACING_PACKAGE),
         importlib.import_module(LOGGING_PACKAGE),
         importlib.import_module(METRICS_PACKAGE),
+        importlib.import_module(PARSER_PACKAGE),
     )
 
 
@@ -93,3 +96,15 @@ def test_logger_init(benchmark):
     stat = benchmark.stats.stats.max
     if stat > LOGGER_INIT_SLA:
         pytest.fail(f"High level imports should be below ${LOGGER_INIT_SLA}s: {stat}")
+
+
+@pytest.mark.perf
+@pytest.mark.benchmark(group="core", disable_gc=True, warmup=False)
+def test_parser_init(benchmark):
+    # GIVEN parser is initialized
+    # WHEN default options are used
+    # THEN initialization perf should be below 50ms
+    benchmark.pedantic(lambda: importlib.import_module(PARSER_PACKAGE))
+    stat = benchmark.stats.stats.max
+    if stat > PARSER_INIT_SLA:
+        pytest.fail(f"High level imports should be below ${PARSER_INIT_SLA}s: {stat}")
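For a quick local check of the import-time budget enforced above, something like this should approximate what the benchmark measures; numbers will vary by machine:

```python
import importlib
import time

start = time.perf_counter()
importlib.import_module("aws_lambda_powertools.utilities.parser")
elapsed = time.perf_counter() - start

# PARSER_INIT_SLA is 0.05s in CI; a cold local import should land well under it
print(f"parser import took {elapsed:.4f}s")
```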