From 6694cd1c1f017aa119df383c1aab6a1860192ef5 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 5 May 2025 17:58:13 -0700 Subject: [PATCH 01/56] implement validators and transformer --- .../declarative/transformations/__init__.py | 3 +- .../transformations/remap_field.py | 58 +++++++++++++++++++ .../declarative/validators/__init__.py | 23 ++++++++ .../declarative/validators/dpath_validator.py | 49 ++++++++++++++++ .../validators/predicate_validator.py | 26 +++++++++ .../validators/validate_adheres_to_schema.py | 31 ++++++++++ .../validators/validate_is_in_list.py | 28 +++++++++ .../validators/validate_is_of_type.py | 27 +++++++++ .../validators/validation_strategy.py | 22 +++++++ .../declarative/validators/validator.py | 18 ++++++ 10 files changed, 284 insertions(+), 1 deletion(-) create mode 100644 airbyte_cdk/sources/declarative/transformations/remap_field.py create mode 100644 airbyte_cdk/sources/declarative/validators/__init__.py create mode 100644 airbyte_cdk/sources/declarative/validators/dpath_validator.py create mode 100644 airbyte_cdk/sources/declarative/validators/predicate_validator.py create mode 100644 airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py create mode 100644 airbyte_cdk/sources/declarative/validators/validate_is_in_list.py create mode 100644 airbyte_cdk/sources/declarative/validators/validate_is_of_type.py create mode 100644 airbyte_cdk/sources/declarative/validators/validation_strategy.py create mode 100644 airbyte_cdk/sources/declarative/validators/validator.py diff --git a/airbyte_cdk/sources/declarative/transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/__init__.py index e18712a01..bf00186c2 100644 --- a/airbyte_cdk/sources/declarative/transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/__init__.py @@ -12,6 +12,7 @@ # isort: split from .add_fields import AddFields +from .remap_field import RemapField from .remove_fields import RemoveFields -__all__ = ["AddFields", "RecordTransformation", "RemoveFields"] +__all__ = ["AddFields", "RecordTransformation", "RemapField", "RemoveFields"] diff --git a/airbyte_cdk/sources/declarative/transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/remap_field.py new file mode 100644 index 000000000..fda4cc964 --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/remap_field.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, Dict, Mapping, Optional + +from airbyte_cdk.sources.declarative.transformations.transformation import RecordTransformation +from airbyte_cdk.sources.types import Config, StreamSlice, StreamState + + +@dataclass +class RemapField(RecordTransformation): + """ + Transformation that remaps a field's value to another value based on a static map. + """ + + map: Mapping[str, Any] + field_path: str + + def transform( + self, + record: Dict[str, Any], + config: Optional[Config] = None, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + ) -> None: + """ + Transforms a record by remapping a field value based on the provided map. + If the original value is found in the map, it's replaced with the mapped value. + If the value is not in the map, the field remains unchanged. 
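As a rough usage sketch of this record-level transformation (hypothetical record and mapping values, not taken from a real connector):

    transformation = RemapField(map={"1": "active", "2": "inactive"}, field_path="account.status")
    record = {"account": {"status": "1"}}
    transformation.transform(record)
    # record is mutated in place and now reads {"account": {"status": "active"}}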
+ + :param record: The input record to be transformed + :param config: The user-provided configuration as specified by the source's spec + :param stream_state: The stream state + :param stream_slice: The stream slice + """ + # Extract path components + path_components = self.field_path.split(".") + + # Navigate to the parent object containing the field to remap + current = record + for i, component in enumerate(path_components[:-1]): + if component not in current: + # Path doesn't exist, so nothing to remap + return + current = current[component] + + # If we encounter a non-dict, we can't continue navigating + if not isinstance(current, dict): + return + + # The last component is the field name to remap + field_name = path_components[-1] + + # Check if the field exists and remap its value if it's in the map + if field_name in current and current[field_name] in self.map: + current[field_name] = self.map[current[field_name]] diff --git a/airbyte_cdk/sources/declarative/validators/__init__.py b/airbyte_cdk/sources/declarative/validators/__init__.py new file mode 100644 index 000000000..0715a2db9 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/__init__.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.validators.dpath_validator import DpathValidator +from airbyte_cdk.sources.declarative.validators.predicate_validator import PredicateValidator +from airbyte_cdk.sources.declarative.validators.validate_adheres_to_schema import ( + ValidateAdheresToSchema, +) +from airbyte_cdk.sources.declarative.validators.validate_is_in_list import ValidateIsInList +from airbyte_cdk.sources.declarative.validators.validate_is_of_type import ValidateIsOfType +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy +from airbyte_cdk.sources.declarative.validators.validator import Validator + +__all__ = [ + "Validator", + "DpathValidator", + "ValidationStrategy", + "ValidateIsInList", + "ValidateIsOfType", + "ValidateAdheresToSchema", + "PredicateValidator", +] diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py new file mode 100644 index 000000000..22537db49 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -0,0 +1,49 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, List, Union + +import dpath.util + +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.validators.validator import Validator +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +@dataclass +class DpathValidator(Validator): + """ + Validator that extracts a value at a specific path in the input data + and applies a validation strategy to it. 
+ """ + + field_path: List[Union[InterpolatedString, str]] + strategy: ValidationStrategy + + def __post_init__(self) -> None: + self._field_path = [ + InterpolatedString.create(path, parameters={}) for path in self.field_path + ] + for path_index in range(len(self.field_path)): + if isinstance(self.field_path[path_index], str): + self._field_path[path_index] = InterpolatedString.create( + self.field_path[path_index], parameters={} + ) + + def validate(self, input_data: dict[str, Any]) -> None: + """ + Extracts the value at the specified path and applies the validation strategy. + + :param input_data: Dictionary containing the data to validate + :raises ValueError: If the path doesn't exist or validation fails + """ + try: + path = [path.eval(input_data) for path in self._field_path] + value = dpath.util.get(input_data, path) + self.strategy.validate(value) + except KeyError: + raise ValueError(f"Path '{self.field_path}' not found in the input data") + except Exception as e: + raise ValueError(f"Error validating path '{self.field_path}': {e}") diff --git a/airbyte_cdk/sources/declarative/validators/predicate_validator.py b/airbyte_cdk/sources/declarative/validators/predicate_validator.py new file mode 100644 index 000000000..af295a9c6 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/predicate_validator.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any + +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +@dataclass +class PredicateValidator: + """ + Validator that applies a validation strategy to a value. + """ + + value: Any + strategy: ValidationStrategy + + def validate(self) -> None: + """ + Applies the validation strategy to the value. + + :raises ValueError: If validation fails + """ + self.strategy.validate(self.value) diff --git a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py new file mode 100644 index 000000000..dd6dfa343 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py @@ -0,0 +1,31 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any + +import jsonschema + +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +@dataclass +class ValidateAdheresToSchema(ValidationStrategy): + """ + Validates that a value adheres to a specified JSON schema. + """ + + schema: dict[str, Any] + + def validate(self, value: Any) -> None: + """ + Validates the value against the JSON schema. + + :param value: The value to validate + :raises ValueError: If the value does not adhere to the schema + """ + try: + jsonschema.validate(instance=value, schema=self.schema) + except jsonschema.ValidationError as e: + raise ValueError(f"JSON schema validation error: {e.message}") diff --git a/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py b/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py new file mode 100644 index 000000000..d8b34fcdf --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from dataclasses import dataclass +from typing import Any + +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +@dataclass +class ValidateIsInList(ValidationStrategy): + """ + Validates that a value is in a list of supported values. + """ + + supported_values: list[Any] + + def validate(self, value: Any) -> None: + """ + Checks if the value is in the list of supported values. + + :param value: The value to validate + :raises ValueError: If the value is not in the list of supported values + """ + if value not in self.supported_values: + supported_values_str = ", ".join(str(v) for v in self.supported_values) + raise ValueError(f"Value '{value}' not in supported values: [{supported_values_str}]") diff --git a/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py b/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py new file mode 100644 index 000000000..e3d48d35b --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any + +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +@dataclass +class ValidateIsOfType(ValidationStrategy): + """ + Validates that a value is of a specified type. + """ + + expected_type: Any + + def validate(self, value: Any) -> None: + """ + Checks if the value is of the expected type. + + :param value: The value to validate + :raises ValueError: If the value is not of the expected type + """ + if not isinstance(value, self.expected_type): + raise ValueError(f"Value '{value}' is not of type {self.expected_type.__name__}") diff --git a/airbyte_cdk/sources/declarative/validators/validation_strategy.py b/airbyte_cdk/sources/declarative/validators/validation_strategy.py new file mode 100644 index 000000000..fdaad6de7 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/validation_strategy.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from abc import ABC, abstractmethod +from typing import Any + + +class ValidationStrategy(ABC): + """ + Base class for validation strategies. + """ + + @abstractmethod + def validate(self, value: Any) -> None: + """ + Validates a value according to a specific strategy. + + :param value: The value to validate + :raises ValueError: If validation fails + """ + pass diff --git a/airbyte_cdk/sources/declarative/validators/validator.py b/airbyte_cdk/sources/declarative/validators/validator.py new file mode 100644 index 000000000..81dc06b04 --- /dev/null +++ b/airbyte_cdk/sources/declarative/validators/validator.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from abc import ABC, abstractmethod +from typing import Any + + +class Validator(ABC): + @abstractmethod + def validate(self, input_data: Any) -> None: + """ + Validates the input data. 
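A short sketch of the schema strategy from this patch used on its own, for example to check a connector config against a fragment of its spec (hypothetical schema and config):

    spec_schema = {
        "type": "object",
        "required": ["api_key"],
        "properties": {"api_key": {"type": "string"}},
    }
    ValidateAdheresToSchema(schema=spec_schema).validate({"api_key": "abc123"})  # passes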
+ + :param input_data: The data to validate + :raises ValueError: If validation fails + """ + pass From 4dbd36377501e859b934c6a58e14b0a473597ed3 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 6 May 2025 16:44:03 -0700 Subject: [PATCH 02/56] create config transformations --- .../config_transformations/__init__.py | 7 +++ .../config_transformation.py | 23 ++++++++ .../config_transformations/remap_field.py | 56 ++++++++++++++++++ .../transformations/remap_field.py | 58 ------------------- 4 files changed, 86 insertions(+), 58 deletions(-) create mode 100644 airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py create mode 100644 airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py create mode 100644 airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py delete mode 100644 airbyte_cdk/sources/declarative/transformations/remap_field.py diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py new file mode 100644 index 000000000..2d95a8ab9 --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from .remap_field import RemapField + +__all__ = ["RemapField"] diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py new file mode 100644 index 000000000..fdb37ef9d --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from abc import ABC, abstractmethod +from typing import Any, Dict + + +class ConfigTransformation(ABC): + """ + Implementations of this class define transformations that can be applied to source configurations. + """ + + @abstractmethod + def transform( + self, + config: Dict[str, Any], + ) -> None: + """ + Transform a configuration by adding, deleting, or mutating fields directly from the config reference passed in argument. + + :param config: The user-provided configuration to be transformed + """ diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py new file mode 100644 index 000000000..360d26f1f --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -0,0 +1,56 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, List, Mapping, Union + +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ConfigTransformation + + +@dataclass +class RemapField(ConfigTransformation): + """ + Transformation that remaps a field's value to another value based on a static map. 
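A minimal sketch of this config-level transformation, assuming a hypothetical config where the target field name itself comes from interpolation (mirroring the interpolated-path test added later in this series):

    transformation = RemapField(
        map={"SANDBOX": "sandbox", "PRODUCTION": "production"},
        field_path=["credentials", "{{ config['environment_key'] }}"],
    )
    config = {"environment_key": "environment", "credentials": {"environment": "SANDBOX"}}
    transformation.transform(config)
    # config["credentials"]["environment"] is now "sandbox"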
+ """ + + map: Mapping[str, Any] + field_path: List[Union[InterpolatedString, str]] + + def __post_init__(self) -> None: + self._field_path = [ + InterpolatedString.create(path, parameters={}) for path in self.field_path + ] + for path_index in range(len(self.field_path)): + if isinstance(self.field_path[path_index], str): + self._field_path[path_index] = InterpolatedString.create( + self.field_path[path_index], parameters={} + ) + + def transform( + self, + config: Mapping[str, Any], + ) -> None: + """ + Transforms a config by remapping a field value based on the provided map. + If the original value is found in the map, it's replaced with the mapped value. + If the value is not in the map, the field remains unchanged. + + :param config: The user-provided configuration to be transformed + """ + path_components = [path.eval(config) for path in self._field_path] + + current = config + for i, component in enumerate(path_components[:-1]): + if component not in current: + return + current = current[component] + + if not isinstance(current, Mapping): + return + + field_name = path_components[-1] + + if field_name in current and current[field_name] in self.map: + current[field_name] = self.map[current[field_name]] diff --git a/airbyte_cdk/sources/declarative/transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/remap_field.py deleted file mode 100644 index fda4cc964..000000000 --- a/airbyte_cdk/sources/declarative/transformations/remap_field.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from dataclasses import dataclass -from typing import Any, Dict, Mapping, Optional - -from airbyte_cdk.sources.declarative.transformations.transformation import RecordTransformation -from airbyte_cdk.sources.types import Config, StreamSlice, StreamState - - -@dataclass -class RemapField(RecordTransformation): - """ - Transformation that remaps a field's value to another value based on a static map. - """ - - map: Mapping[str, Any] - field_path: str - - def transform( - self, - record: Dict[str, Any], - config: Optional[Config] = None, - stream_state: Optional[StreamState] = None, - stream_slice: Optional[StreamSlice] = None, - ) -> None: - """ - Transforms a record by remapping a field value based on the provided map. - If the original value is found in the map, it's replaced with the mapped value. - If the value is not in the map, the field remains unchanged. 
- - :param record: The input record to be transformed - :param config: The user-provided configuration as specified by the source's spec - :param stream_state: The stream state - :param stream_slice: The stream slice - """ - # Extract path components - path_components = self.field_path.split(".") - - # Navigate to the parent object containing the field to remap - current = record - for i, component in enumerate(path_components[:-1]): - if component not in current: - # Path doesn't exist, so nothing to remap - return - current = current[component] - - # If we encounter a non-dict, we can't continue navigating - if not isinstance(current, dict): - return - - # The last component is the field name to remap - field_name = path_components[-1] - - # Check if the field exists and remap its value if it's in the map - if field_name in current and current[field_name] in self.map: - current[field_name] = self.map[current[field_name]] From 03e776c9f4e4fbc9e698be50f98edb7707dca550 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 09:37:14 -0700 Subject: [PATCH 03/56] remove unnecessary validation strategies --- .../validators/validate_is_in_list.py | 28 ------------------- .../validators/validate_is_of_type.py | 27 ------------------ 2 files changed, 55 deletions(-) delete mode 100644 airbyte_cdk/sources/declarative/validators/validate_is_in_list.py delete mode 100644 airbyte_cdk/sources/declarative/validators/validate_is_of_type.py diff --git a/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py b/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py deleted file mode 100644 index d8b34fcdf..000000000 --- a/airbyte_cdk/sources/declarative/validators/validate_is_in_list.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from dataclasses import dataclass -from typing import Any - -from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy - - -@dataclass -class ValidateIsInList(ValidationStrategy): - """ - Validates that a value is in a list of supported values. - """ - - supported_values: list[Any] - - def validate(self, value: Any) -> None: - """ - Checks if the value is in the list of supported values. - - :param value: The value to validate - :raises ValueError: If the value is not in the list of supported values - """ - if value not in self.supported_values: - supported_values_str = ", ".join(str(v) for v in self.supported_values) - raise ValueError(f"Value '{value}' not in supported values: [{supported_values_str}]") diff --git a/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py b/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py deleted file mode 100644 index e3d48d35b..000000000 --- a/airbyte_cdk/sources/declarative/validators/validate_is_of_type.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from dataclasses import dataclass -from typing import Any - -from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy - - -@dataclass -class ValidateIsOfType(ValidationStrategy): - """ - Validates that a value is of a specified type. - """ - - expected_type: Any - - def validate(self, value: Any) -> None: - """ - Checks if the value is of the expected type. 
- - :param value: The value to validate - :raises ValueError: If the value is not of the expected type - """ - if not isinstance(value, self.expected_type): - raise ValueError(f"Value '{value}' is not of type {self.expected_type.__name__}") From 41f376bf2a6a604f8641e09648d6656f6f17f163 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 11:03:47 -0700 Subject: [PATCH 04/56] chore: format code --- airbyte_cdk/sources/declarative/transformations/__init__.py | 1 - .../transformations/config_transformations/__init__.py | 2 +- .../config_transformations/config_transformation.py | 2 +- .../transformations/config_transformations/remap_field.py | 6 ++++-- airbyte_cdk/sources/declarative/validators/__init__.py | 2 +- .../sources/declarative/validators/dpath_validator.py | 2 +- .../sources/declarative/validators/predicate_validator.py | 2 +- .../declarative/validators/validate_adheres_to_schema.py | 6 +++--- .../sources/declarative/validators/validation_strategy.py | 2 +- airbyte_cdk/sources/declarative/validators/validator.py | 2 +- 10 files changed, 14 insertions(+), 13 deletions(-) diff --git a/airbyte_cdk/sources/declarative/transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/__init__.py index bf00186c2..3bfda2331 100644 --- a/airbyte_cdk/sources/declarative/transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/__init__.py @@ -12,7 +12,6 @@ # isort: split from .add_fields import AddFields -from .remap_field import RemapField from .remove_fields import RemoveFields __all__ = ["AddFields", "RecordTransformation", "RemapField", "RemoveFields"] diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py index 2d95a8ab9..4dbd08976 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from .remap_field import RemapField diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py index fdb37ef9d..a30e9cb02 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from abc import ABC, abstractmethod diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 360d26f1f..093075e42 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -1,12 +1,14 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# from dataclasses import dataclass from typing import Any, List, Mapping, Union from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString -from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ConfigTransformation +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( + ConfigTransformation, +) @dataclass diff --git a/airbyte_cdk/sources/declarative/validators/__init__.py b/airbyte_cdk/sources/declarative/validators/__init__.py index 0715a2db9..4698da6aa 100644 --- a/airbyte_cdk/sources/declarative/validators/__init__.py +++ b/airbyte_cdk/sources/declarative/validators/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from airbyte_cdk.sources.declarative.validators.dpath_validator import DpathValidator diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index 22537db49..40d085690 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from dataclasses import dataclass diff --git a/airbyte_cdk/sources/declarative/validators/predicate_validator.py b/airbyte_cdk/sources/declarative/validators/predicate_validator.py index af295a9c6..1526777ea 100644 --- a/airbyte_cdk/sources/declarative/validators/predicate_validator.py +++ b/airbyte_cdk/sources/declarative/validators/predicate_validator.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from dataclasses import dataclass diff --git a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py index dd6dfa343..376618aa5 100644 --- a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py +++ b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py @@ -1,9 +1,9 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from dataclasses import dataclass -from typing import Any +from typing import Any, Mapping import jsonschema @@ -16,7 +16,7 @@ class ValidateAdheresToSchema(ValidationStrategy): Validates that a value adheres to a specified JSON schema. """ - schema: dict[str, Any] + schema: Mapping[str, Any] def validate(self, value: Any) -> None: """ diff --git a/airbyte_cdk/sources/declarative/validators/validation_strategy.py b/airbyte_cdk/sources/declarative/validators/validation_strategy.py index fdaad6de7..123aa7694 100644 --- a/airbyte_cdk/sources/declarative/validators/validation_strategy.py +++ b/airbyte_cdk/sources/declarative/validators/validation_strategy.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# from abc import ABC, abstractmethod diff --git a/airbyte_cdk/sources/declarative/validators/validator.py b/airbyte_cdk/sources/declarative/validators/validator.py index 81dc06b04..d508d7b65 100644 --- a/airbyte_cdk/sources/declarative/validators/validator.py +++ b/airbyte_cdk/sources/declarative/validators/validator.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. # from abc import ABC, abstractmethod From 454cb78c3c8265a91716602c43305c75b5a9770b Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 16:16:43 -0700 Subject: [PATCH 05/56] add tests for dpath validator --- .../declarative/validators/dpath_validator.py | 32 +++++-- .../validators/test_dpath_validator.py | 92 +++++++++++++++++++ 2 files changed, 116 insertions(+), 8 deletions(-) create mode 100644 unit_tests/sources/declarative/validators/test_dpath_validator.py diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index 40d085690..5cc94ccd1 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -39,11 +39,27 @@ def validate(self, input_data: dict[str, Any]) -> None: :param input_data: Dictionary containing the data to validate :raises ValueError: If the path doesn't exist or validation fails """ - try: - path = [path.eval(input_data) for path in self._field_path] - value = dpath.util.get(input_data, path) - self.strategy.validate(value) - except KeyError: - raise ValueError(f"Path '{self.field_path}' not found in the input data") - except Exception as e: - raise ValueError(f"Error validating path '{self.field_path}': {e}") + path = [path.eval({}) for path in self._field_path] + + if len(path) == 0: + raise ValueError("Field path is empty") + + if "*" in path: + try: + values = dpath.values(input_data, path) + except KeyError as e: + raise KeyError(f"Error validating path '{self.field_path}': {e}") + for value in values: + try: + self.strategy.validate(value) + except Exception as e: + raise ValueError(f"Error validating value '{value}': {e}") + else: + try: + value = dpath.get(input_data, path) + except KeyError as e: + raise KeyError(f"Error validating path '{self.field_path}': {e}") + try: + self.strategy.validate(value) + except Exception as e: + raise ValueError(f"Error validating value '{value}': {e}") diff --git a/unit_tests/sources/declarative/validators/test_dpath_validator.py b/unit_tests/sources/declarative/validators/test_dpath_validator.py new file mode 100644 index 000000000..5177676ab --- /dev/null +++ b/unit_tests/sources/declarative/validators/test_dpath_validator.py @@ -0,0 +1,92 @@ +import pytest +from unittest import TestCase + +from airbyte_cdk.sources.declarative.validators.dpath_validator import DpathValidator +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +class MockValidationStrategy(ValidationStrategy): + def __init__(self, should_fail=False, error_message="Validation failed"): + self.should_fail = should_fail + self.error_message = error_message + self.validate_called = False + self.validated_value = None + + def validate(self, value): + self.validate_called = True + self.validated_value = value + if self.should_fail: + raise ValueError(self.error_message) + + +class TestDpathValidator(TestCase): + def test_given_valid_path_and_input_validate_is_successful(self): + strategy = 
MockValidationStrategy() + validator = DpathValidator(field_path=["user", "profile", "email"], strategy=strategy) + + test_data = {"user": {"profile": {"email": "test@example.com", "name": "Test User"}}} + + validator.validate(test_data) + + assert strategy.validate_called + assert strategy.validated_value + + def test_given_invalid_path_when_validate_then_raise_key_error(self): + strategy = MockValidationStrategy() + validator = DpathValidator(field_path=["user", "profile", "phone"], strategy=strategy) + + test_data = {"user": {"profile": {"email": "test@example.com"}}} + + with pytest.raises(KeyError) as context: + validator.validate(test_data) + assert "Error validating path" in str(context.exception) + assert not strategy.validate_called + + def test_given_strategy_fails_when_validate_then_raise_value_error(self): + error_message = "Invalid email format" + strategy = MockValidationStrategy(should_fail=True, error_message=error_message) + validator = DpathValidator(field_path=["user", "email"], strategy=strategy) + + test_data = {"user": {"email": "invalid-email"}} + + with pytest.raises(ValueError) as context: + validator.validate(test_data) + + assert "Error validating value" in str(context.exception) + assert error_message in str(context.exception) + assert strategy.validate_called + assert strategy.validated_value == "invalid-email" + + def test_given_empty_path_list_when_validate_then_validate_raises_exception(self): + strategy = MockValidationStrategy() + validator = DpathValidator(field_path=[], strategy=strategy) + test_data = {"key": "value"} + + with pytest.raises(ValueError): + validator.validate(test_data) + + def test_given_empty_input_data_when_validate_then_validate_raises_exception(self): + strategy = MockValidationStrategy() + validator = DpathValidator(field_path=["data", "field"], strategy=strategy) + + test_data = {} + + with pytest.raises(KeyError): + validator.validate(test_data) + + def test_path_with_wildcard_when_validate_then_validate_is_successful(self): + strategy = MockValidationStrategy() + validator = DpathValidator(field_path=["users", "*", "email"], strategy=strategy) + + test_data = { + "users": { + "user1": {"email": "user1@example.com", "name": "User One"}, + "user2": {"email": "user2@example.com", "name": "User Two"}, + } + } + + validator.validate(test_data) + + assert strategy.validate_called + assert strategy.validated_value in ["user1@example.com", "user2@example.com"] + self.assertIn(strategy.validated_value, ["user1@example.com", "user2@example.com"]) From 54f9f9f4e1cb44a674bc4175d058eb7a8944d7cd Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 16:33:58 -0700 Subject: [PATCH 06/56] add predicate validator tests --- .../validators/test_predicate_validator.py | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 unit_tests/sources/declarative/validators/test_predicate_validator.py diff --git a/unit_tests/sources/declarative/validators/test_predicate_validator.py b/unit_tests/sources/declarative/validators/test_predicate_validator.py new file mode 100644 index 000000000..1fa7fea8d --- /dev/null +++ b/unit_tests/sources/declarative/validators/test_predicate_validator.py @@ -0,0 +1,54 @@ +import pytest +from unittest import TestCase + +from airbyte_cdk.sources.declarative.validators.predicate_validator import PredicateValidator +from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy + + +class MockValidationStrategy(ValidationStrategy): + def __init__(self, should_fail=False, 
error_message="Validation failed"): + self.should_fail = should_fail + self.error_message = error_message + self.validate_called = False + self.validated_value = None + + def validate(self, value): + self.validate_called = True + self.validated_value = value + if self.should_fail: + raise ValueError(self.error_message) + + +class TestPredicateValidator(TestCase): + def test_given_valid_input_validate_is_successful(self): + strategy = MockValidationStrategy() + test_value = "test@example.com" + validator = PredicateValidator(value=test_value, strategy=strategy) + + validator.validate() + + assert strategy.validate_called + assert strategy.validated_value == test_value + + def test_given_invalid_input_when_validate_then_raise_value_error(self): + error_message = "Invalid email format" + strategy = MockValidationStrategy(should_fail=True, error_message=error_message) + test_value = "invalid-email" + validator = PredicateValidator(value=test_value, strategy=strategy) + + with pytest.raises(ValueError) as context: + validator.validate() + + assert error_message in str(context.exception) + assert strategy.validate_called + assert strategy.validated_value == test_value + + def test_given_complex_object_when_validate_then_successful(self): + strategy = MockValidationStrategy() + test_value = {"user": {"email": "test@example.com", "name": "Test User"}} + validator = PredicateValidator(value=test_value, strategy=strategy) + + validator.validate() + + assert strategy.validate_called + assert strategy.validated_value == test_value From 7881f9f4423c72e6cdc0b84daefcebbfc0656b05 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:08:37 -0700 Subject: [PATCH 07/56] add tests for RemapField --- .../config_transformations/remap_field.py | 2 + .../test_remap_field.py | 111 ++++++++++++++++++ 2 files changed, 113 insertions(+) create mode 100644 unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 093075e42..eb2f92010 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -21,6 +21,8 @@ class RemapField(ConfigTransformation): field_path: List[Union[InterpolatedString, str]] def __post_init__(self) -> None: + if not self.field_path: + raise Exception("field_path cannot be empty.") self._field_path = [ InterpolatedString.create(path, parameters={}) for path in self.field_path ] diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py b/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py new file mode 100644 index 000000000..5ea3ac118 --- /dev/null +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py @@ -0,0 +1,111 @@ +import pytest +from unittest import TestCase +from copy import deepcopy + +from airbyte_cdk.sources.declarative.transformations.config_transformations.remap_field import ( + RemapField, +) + + +class TestRemapField(TestCase): + def test_given_valid_inputs_when_transform_then_field_is_remapped(self): + remap_transform = RemapField( + field_path=["authorization", "auth_type"], + map={"client_credentials": "oauth2", "api_key": "key_auth"}, + ) + + config = { + "authorization": { + "auth_type": 
"client_credentials", + "client_id": "12345", + "client_secret": "secret", + } + } + original_config = deepcopy(config) + + remap_transform.transform(config) + + assert config["authorization"]["auth_type"] == "oauth2" + assert original_config["authorization"]["auth_type"] == "client_credentials" + assert config["authorization"]["client_id"] == original_config["authorization"]["client_id"] + assert ( + config["authorization"]["client_secret"] + == original_config["authorization"]["client_secret"] + ) + + def test_given_value_not_in_map_when_transform_then_field_unchanged(self): + remap_transform = RemapField( + field_path=["authorization", "auth_type"], + map={"client_credentials": "oauth2", "api_key": "key_auth"}, + ) + + config = { + "authorization": {"auth_type": "basic_auth", "username": "user", "password": "pass"} + } + original_config = deepcopy(config) + remap_transform.transform(config) + + assert config["authorization"]["auth_type"] == "basic_auth" + assert config == original_config + + def test_given_field_path_not_in_config_when_transform_then_config_unchanged(self): + remap_transform = RemapField( + field_path=["authentication", "type"], # Not in config + map={"client_credentials": "oauth2"}, + ) + config = { + "authorization": { # Different key + "auth_type": "client_credentials" + } + } + original_config = deepcopy(config) + + remap_transform.transform(config) + + assert config == original_config + + def test_given_interpolated_path_when_transform_then_field_is_remapped(self): + remap_transform = RemapField( + field_path=["auth_data", "{{ config['auth_field_name'] }}"], + map={"basic": "basic_auth", "token": "bearer"}, + ) + + config = { + "auth_field_name": "type", + "auth_data": {"type": "token", "token_value": "abc123"}, + } + + remap_transform.transform(config) + + assert config["auth_data"]["type"] == "bearer" + + def test_given_empty_map_when_transform_then_config_unchanged(self): + remap_transform = RemapField(field_path=["authorization", "auth_type"], map={}) + + config = {"authorization": {"auth_type": "client_credentials", "client_id": "12345"}} + original_config = deepcopy(config) + + remap_transform.transform(config) + + assert config == original_config + + def test_given_empty_field_path_when_transform_then_raises_exception(self): + with pytest.raises(Exception): + RemapField(field_path=[], map={"old_value": "new_value"}) + + def test_multiple_transformations_applied_in_sequence(self): + auth_type_transform = RemapField( + field_path=["auth", "type"], map={"api_key": "key_auth", "oauth": "oauth2"} + ) + + env_transform = RemapField( + field_path=["environment"], map={"dev": "development", "prod": "production"} + ) + + config = {"auth": {"type": "oauth", "credentials": "secret"}, "environment": "dev"} + + auth_type_transform.transform(config) + env_transform.transform(config) + + assert config["auth"]["type"] == "oauth2" + assert config["environment"] == "development" From f8c252bff0915e20775d8c9e6be88284bf41814c Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:26:40 -0700 Subject: [PATCH 08/56] create tests for ValidateAdheresToSchema --- .../test_validate_adheres_to_schema.py | 121 ++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py diff --git a/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py new file mode 100644 index 000000000..8eb370335 --- 
/dev/null +++ b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py @@ -0,0 +1,121 @@ +import pytest +import jsonschema +from unittest import TestCase + +from airbyte_cdk.sources.declarative.validators.validate_adheres_to_schema import ( + ValidateAdheresToSchema, +) + + +class TestValidateAdheresToSchema(TestCase): + def test_given_valid_input_matching_schema_when_validate_then_succeeds(self): + schema = { + "type": "object", + "required": ["id", "name"], + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "email": {"type": "string", "format": "email"}, + "age": {"type": "integer", "minimum": 0}, + }, + } + + validator = ValidateAdheresToSchema(schema=schema) + + valid_data = {"id": 1, "name": "John Doe", "email": "john@example.com", "age": 30} + + validator.validate(valid_data) + + def test_given_missing_required_field_when_validate_then_raises_error(self): + schema = { + "type": "object", + "required": ["id", "name"], + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "email": {"type": "string", "format": "email"}, + "age": {"type": "integer", "minimum": 0}, + }, + } + + validator = ValidateAdheresToSchema(schema=schema) + + # Data missing the required 'name' field + invalid_data = {"id": 1, "email": "john@example.com", "age": 30} + + with pytest.raises(ValueError) as exc_info: + validator.validate(invalid_data) + + assert "required" in str(exc_info.value) + assert "name" in str(exc_info.value) + + def test_given_incorrect_type_when_validate_then_raises_error(self): + schema = { + "type": "object", + "required": ["id", "name"], + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "email": {"type": "string", "format": "email"}, + "age": {"type": "integer", "minimum": 0}, + }, + } + + validator = ValidateAdheresToSchema(schema=schema) + + invalid_data = { + "id": "one", # Should be an integer + "name": "John Doe", + "email": "john@example.com", + "age": 30, + } + + with pytest.raises(ValueError) as exc_info: + validator.validate(invalid_data) + + assert "type" in str(exc_info.value) + assert "integer" in str(exc_info.value) + + def test_given_invalid_schema_when_validate_then_raises_error(self): + invalid_schema = {"type": "object", "properties": {"id": {"type": "invalid_type"}}} + + validator = ValidateAdheresToSchema(schema=invalid_schema) + data = {"id": 123} + + with pytest.raises(jsonschema.exceptions.SchemaError) as exc_info: + validator.validate(data) + + assert "invalid_type" in str(exc_info.value) + + def test_given_null_value_when_validate_then_succeeds_if_nullable(self): + schema = { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "optional_field": {"type": ["string", "null"]}, + }, + "required": ["id", "name"], + } + + validator = ValidateAdheresToSchema(schema=schema) + + data_with_null = {"id": 1, "name": "Test User", "optional_field": None} + + validator.validate(data_with_null) + + data_without_field = {"id": 1, "name": "Test User"} + + validator.validate(data_without_field) + + def test_given_empty_schema_when_validate_then_succeeds(self): + empty_schema = {} + + validator = ValidateAdheresToSchema(schema=empty_schema) + + validator.validate(123) + validator.validate("string value") + validator.validate({"complex": "object"}) + validator.validate([1, 2, 3]) + validator.validate(None) + validator.validate(True) From c492b81098123ed63d52b67ed900946c3586c8a8 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 
17:29:19 -0700 Subject: [PATCH 09/56] chore: type check --- .../transformations/config_transformations/remap_field.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index eb2f92010..23c70a118 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -3,7 +3,7 @@ # from dataclasses import dataclass -from typing import Any, List, Mapping, Union +from typing import Any, List, Mapping, MutableMapping, Union from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( @@ -34,7 +34,7 @@ def __post_init__(self) -> None: def transform( self, - config: Mapping[str, Any], + config: MutableMapping[str, Any], ) -> None: """ Transforms a config by remapping a field value based on the provided map. From 48e5ab01d8f1c3b8ab0b700f52e2263065407620 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:31:30 -0700 Subject: [PATCH 10/56] chore: lint --- airbyte_cdk/sources/declarative/validators/__init__.py | 4 ---- .../sources/declarative/validators/dpath_validator.py | 2 +- .../config_transformations/test_remap_field.py | 5 +++-- .../sources/declarative/validators/test_dpath_validator.py | 3 ++- .../declarative/validators/test_predicate_validator.py | 3 ++- .../validators/test_validate_adheres_to_schema.py | 5 +++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/airbyte_cdk/sources/declarative/validators/__init__.py b/airbyte_cdk/sources/declarative/validators/__init__.py index 4698da6aa..cd043a168 100644 --- a/airbyte_cdk/sources/declarative/validators/__init__.py +++ b/airbyte_cdk/sources/declarative/validators/__init__.py @@ -7,8 +7,6 @@ from airbyte_cdk.sources.declarative.validators.validate_adheres_to_schema import ( ValidateAdheresToSchema, ) -from airbyte_cdk.sources.declarative.validators.validate_is_in_list import ValidateIsInList -from airbyte_cdk.sources.declarative.validators.validate_is_of_type import ValidateIsOfType from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy from airbyte_cdk.sources.declarative.validators.validator import Validator @@ -16,8 +14,6 @@ "Validator", "DpathValidator", "ValidationStrategy", - "ValidateIsInList", - "ValidateIsOfType", "ValidateAdheresToSchema", "PredicateValidator", ] diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index 5cc94ccd1..e4a5159a8 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -8,8 +8,8 @@ import dpath.util from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString -from airbyte_cdk.sources.declarative.validators.validator import Validator from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy +from airbyte_cdk.sources.declarative.validators.validator import Validator @dataclass diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py 
b/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py index 5ea3ac118..af0493e92 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py @@ -1,6 +1,7 @@ -import pytest -from unittest import TestCase from copy import deepcopy +from unittest import TestCase + +import pytest from airbyte_cdk.sources.declarative.transformations.config_transformations.remap_field import ( RemapField, diff --git a/unit_tests/sources/declarative/validators/test_dpath_validator.py b/unit_tests/sources/declarative/validators/test_dpath_validator.py index 5177676ab..9fcb51733 100644 --- a/unit_tests/sources/declarative/validators/test_dpath_validator.py +++ b/unit_tests/sources/declarative/validators/test_dpath_validator.py @@ -1,6 +1,7 @@ -import pytest from unittest import TestCase +import pytest + from airbyte_cdk.sources.declarative.validators.dpath_validator import DpathValidator from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy diff --git a/unit_tests/sources/declarative/validators/test_predicate_validator.py b/unit_tests/sources/declarative/validators/test_predicate_validator.py index 1fa7fea8d..8a1ab18c7 100644 --- a/unit_tests/sources/declarative/validators/test_predicate_validator.py +++ b/unit_tests/sources/declarative/validators/test_predicate_validator.py @@ -1,6 +1,7 @@ -import pytest from unittest import TestCase +import pytest + from airbyte_cdk.sources.declarative.validators.predicate_validator import PredicateValidator from airbyte_cdk.sources.declarative.validators.validation_strategy import ValidationStrategy diff --git a/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py index 8eb370335..45a33b83e 100644 --- a/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py +++ b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py @@ -1,7 +1,8 @@ -import pytest -import jsonschema from unittest import TestCase +import jsonschema +import pytest + from airbyte_cdk.sources.declarative.validators.validate_adheres_to_schema import ( ValidateAdheresToSchema, ) From 3623325a3d750b9df2ea21c826162901938ae56a Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:39:29 -0700 Subject: [PATCH 11/56] add test for json strings --- .../test_validate_adheres_to_schema.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py index 45a33b83e..bd3ccf7fb 100644 --- a/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py +++ b/unit_tests/sources/declarative/validators/test_validate_adheres_to_schema.py @@ -114,9 +114,17 @@ def test_given_empty_schema_when_validate_then_succeeds(self): validator = ValidateAdheresToSchema(schema=empty_schema) - validator.validate(123) - validator.validate("string value") validator.validate({"complex": "object"}) - validator.validate([1, 2, 3]) - validator.validate(None) - validator.validate(True) + + def test_given_json_string_when_validate_then_succeeds(self): + schema = { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + }, + } + + validator = 
ValidateAdheresToSchema(schema=schema) + + validator.validate('{"id": 1, "name": "John Doe"}') From dad6100ff49fd8d175a1ab858fb05b338cedef4e Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:44:57 -0700 Subject: [PATCH 12/56] fix errant inclusion --- airbyte_cdk/sources/declarative/transformations/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte_cdk/sources/declarative/transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/__init__.py index 3bfda2331..e18712a01 100644 --- a/airbyte_cdk/sources/declarative/transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/__init__.py @@ -14,4 +14,4 @@ from .add_fields import AddFields from .remove_fields import RemoveFields -__all__ = ["AddFields", "RecordTransformation", "RemapField", "RemoveFields"] +__all__ = ["AddFields", "RecordTransformation", "RemoveFields"] From 565b7090efd14e97b8d6268f6f475317df5556c0 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:52:12 -0700 Subject: [PATCH 13/56] add json string parsing to ValidateAdheresToSchema --- .../declarative/validators/validate_adheres_to_schema.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py index 376618aa5..168fb4870 100644 --- a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py +++ b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py @@ -3,6 +3,7 @@ # from dataclasses import dataclass +import json from typing import Any, Mapping import jsonschema @@ -25,6 +26,13 @@ def validate(self, value: Any) -> None: :param value: The value to validate :raises ValueError: If the value does not adhere to the schema """ + + if isinstance(value, str): + try: + value = json.loads(value) + except json.JSONDecodeError as e: + raise ValueError(f"Invalid JSON string: {value}") from e + try: jsonschema.validate(instance=value, schema=self.schema) except jsonschema.ValidationError as e: From 01415e27b6eb519582a3b043cf2622896c671c98 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 7 May 2025 17:58:27 -0700 Subject: [PATCH 14/56] chore: lint --- .../declarative/validators/validate_adheres_to_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py index 168fb4870..2ff003951 100644 --- a/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py +++ b/airbyte_cdk/sources/declarative/validators/validate_adheres_to_schema.py @@ -2,8 +2,8 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. # -from dataclasses import dataclass import json +from dataclasses import dataclass from typing import Any, Mapping import jsonschema From 53a5724ae24fdb35d886a1473479c863571e3cc4 Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 16:26:56 -0700 Subject: [PATCH 15/56] fix anyio issue --- poetry.lock | 756 +++++++++++++++++++++++++++++++++++++------------ pyproject.toml | 2 + 2 files changed, 570 insertions(+), 188 deletions(-) diff --git a/poetry.lock b/poetry.lock index e031efb6b..01a3ee57c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -7,7 +7,7 @@ description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, @@ -20,7 +20,7 @@ description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8"}, {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5"}, @@ -111,7 +111,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -120,7 +120,7 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -136,6 +136,7 @@ description = "Declares the Airbyte Protocol using Python Dataclasses. 
Dataclass optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "airbyte_protocol_models_dataclasses-0.15.0-py3-none-any.whl", hash = "sha256:0fe8d7c2863c348b350efcf5f1af5872dc9071060408285e4708d97a9be5e2fb"}, {file = "airbyte_protocol_models_dataclasses-0.15.0.tar.gz", hash = "sha256:a5bad4ee7ae0a04f1436967b7afd3306d28e1cd2e5acedf0cce588f0c80ed001"}, @@ -148,6 +149,7 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -160,6 +162,7 @@ description = "Unicode to ASCII transliteration" optional = false python-versions = ">=3.3" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "anyascii-0.3.2-py3-none-any.whl", hash = "sha256:3b3beef6fc43d9036d3b0529050b0c48bfad8bc960e9e562d7223cfb94fe45d4"}, {file = "anyascii-0.3.2.tar.gz", hash = "sha256:9d5d32ef844fe225b8bc7cba7f950534fae4da27a9bf3a6bea2cb0ea46ce4730"}, @@ -167,26 +170,26 @@ files = [ [[package]] name = "anyio" -version = "4.8.0" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = true -python-versions = ">=3.9" +optional = false +python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] [package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] -trio = ["trio (>=0.26.1)"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] [[package]] name = "async-timeout" @@ -195,7 +198,7 @@ description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == 
\"vector-db-based\" and python_version == \"3.10\"" +markers = "extra == \"vector-db-based\" and python_version < \"3.11\"" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, @@ -208,6 +211,7 @@ description = "reference implementation of PEP 3156" optional = false python-versions = "*" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"}, {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"}, @@ -222,6 +226,7 @@ description = "PEP 224 implementation" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "attributes-doc-0.4.0.tar.gz", hash = "sha256:b1576c94a714e9fc2c65c47cf10d0c8e1a5f7c4f5ae7f69006be108d95cbfbfb"}, {file = "attributes_doc-0.4.0-py2.py3-none-any.whl", hash = "sha256:4c3007d9e58f3a6cb4b9c614c4d4ce2d92161581f28e594ddd8241cc3a113bdd"}, @@ -234,18 +239,19 @@ description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy 
(>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "avro" @@ -254,7 +260,7 @@ description = "Avro is a serialization and RPC framework." optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "avro-1.12.0-py2.py3-none-any.whl", hash = "sha256:9a255c72e1837341dd4f6ff57b2b6f68c0f0cecdef62dd04962e10fd33bec05b"}, {file = "avro-1.12.0.tar.gz", hash = "sha256:cad9c53b23ceed699c7af6bddced42e2c572fd6b408c257a7d4fc4e8cf2e2d6b"}, @@ -271,11 +277,32 @@ description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "beartype" +version = "0.20.2" +description = "Unbearably fast near-real-time hybrid runtime-static type-checking in pure Python." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "beartype-0.20.2-py3-none-any.whl", hash = "sha256:5171a91ecf01438a59884f0cde37d2d5da2c992198b53d6ba31db3940f47ff04"}, + {file = "beartype-0.20.2.tar.gz", hash = "sha256:38c60c065ad99364a8c767e8a0e71ba8263d467b91414ed5dcffb7758a2e8079"}, +] + +[package.extras] +dev = ["autoapi (>=0.9.0)", "click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test = ["click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] +test-tox = ["click", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "typing-extensions (>=3.10.0.0)", "xarray"] +test-tox-coverage = ["coverage (>=5.5)"] + [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -283,7 +310,7 @@ description = "Screen-scraping library" optional = true python-versions = ">=3.6.0" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, @@ -306,6 +333,7 @@ description = "When they're not builtins, they're boltons." optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "boltons-25.0.0-py3-none-any.whl", hash = "sha256:dc9fb38bf28985715497d1b54d00b62ea866eca3938938ea9043e254a3a6ca62"}, {file = "boltons-25.0.0.tar.gz", hash = "sha256:e110fbdc30b7b9868cb604e3f71d4722dd8f4dcb4a5ddd06028ba8f1ab0b5ace"}, @@ -318,6 +346,7 @@ description = "Bash style brace expander." 
optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bracex-2.5.post1-py3-none-any.whl", hash = "sha256:13e5732fec27828d6af308628285ad358047cec36801598368cb28bc631dbaf6"}, {file = "bracex-2.5.post1.tar.gz", hash = "sha256:12c50952415bfa773d2d9ccb8e79651b8cdb1f31a42f6091b804f6ba2b4a66b6"}, @@ -330,6 +359,7 @@ description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, @@ -342,6 +372,7 @@ description = "Composable complex class support for attrs and dataclasses." optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, @@ -356,8 +387,8 @@ typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_ver bson = ["pymongo (>=4.4.0)"] cbor2 = ["cbor2 (>=5.4.6)"] msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] +msgspec = ["msgspec (>=0.18.5)"] +orjson = ["orjson (>=3.9.2)"] pyyaml = ["pyyaml (>=6.0)"] tomlkit = ["tomlkit (>=0.11.8)"] ujson = ["ujson (>=5.7.0)"] @@ -369,6 +400,7 @@ description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -381,7 +413,7 @@ description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -462,7 +494,7 @@ description = "Universal encoding detector for Python 3" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, @@ -475,6 +507,7 @@ description = "The Real First Universal Charset Detector. 
Open, modern and activ optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -577,6 +610,7 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -592,7 +626,7 @@ description = "" optional = true python-versions = ">=3.7,<4.0" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"}, {file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"}, @@ -613,7 +647,7 @@ description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["main", "dev"] -markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +markers = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -626,7 +660,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -701,6 +735,7 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -770,7 +805,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "cramjam" @@ -779,7 +814,7 @@ description = "Thin Python 
bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -883,6 +918,7 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, @@ -925,10 +961,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -941,7 +977,7 @@ description = "Composable style cycles" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -951,6 +987,30 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "dagger-io" +version = "0.18.6" +description = "A client package for running Dagger pipelines in Python." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "dagger_io-0.18.6-py3-none-any.whl", hash = "sha256:357e426ce42ec5b27da87a9e3145e6b5ba5fd02dbbad317ecc1ff09c1e1a44cd"}, + {file = "dagger_io-0.18.6.tar.gz", hash = "sha256:165c1c16165feb12cfdd89fce50c898e6ebc5a9acaaa0b725569317c404e45ce"}, +] + +[package.dependencies] +anyio = ">=3.6.2" +beartype = ">=0.18.2" +cattrs = ">=24.1.0" +gql = {version = ">=3.5.0", extras = ["httpx"]} +opentelemetry-exporter-otlp-proto-http = ">=1.23.0" +opentelemetry-sdk = ">=1.23.0" +platformdirs = ">=2.6.2" +rich = ">=10.11.0" +typing-extensions = ">=4.13.0" + [[package]] name = "dataclasses-json" version = "0.6.7" @@ -958,7 +1018,7 @@ description = "Easily serialize dataclasses to and from JSON." optional = true python-versions = "<4.0,>=3.7" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"file-based\"" +markers = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, @@ -968,6 +1028,25 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] + [[package]] name = "deptry" version = "0.23.0" @@ -975,6 +1054,7 @@ description = "A command line utility to check for unused, missing and transitiv optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "deptry-0.23.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1f2a6817a37d76e8f6b667381b7caf6ea3e6d6c18b5be24d36c625f387c79852"}, {file = "deptry-0.23.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:9601b64cc0aed42687fdd5c912d5f1e90d7f7333fb589b14e35bfdfebae866f3"}, @@ -1008,6 +1088,7 @@ description = "Filesystem-like pathing and searching for dictionaries" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576"}, {file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"}, @@ -1020,6 +1101,7 @@ description = "Dynamic version generation" optional = false python-versions = ">=3.5" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dunamai-1.23.0-py3-none-any.whl", hash = 
"sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, @@ -1035,7 +1117,7 @@ description = "Emoji for Python" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b"}, {file = "emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b"}, @@ -1051,7 +1133,7 @@ description = "An implementation of lxml.xmlfile for the standard library" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, @@ -1064,7 +1146,7 @@ description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev"] -markers = "python_version == \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -1080,7 +1162,7 @@ description = "Fast read/write of AVRO files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"file-based\"" +markers = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"}, {file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"}, @@ -1122,7 +1204,7 @@ description = "Infer file type and MIME type of any file/buffer. 
No external dep optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, @@ -1135,6 +1217,7 @@ description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, @@ -1152,7 +1235,7 @@ description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "fonttools-4.55.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3b332ea7b7f5f3d99f9bc5a28a23c3824ae72711abf7c4e1d62fa21699fdebe7"}, {file = "fonttools-4.55.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d8f925909256e62152e7c3e192655dbca3ab8c3cdef7d7b436732727e80feb6"}, @@ -1207,18 +1290,18 @@ files = [ ] [package.extras] -all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] +interpolatable = ["munkres", "pycairo", "scipy"] lxml = ["lxml (>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] repacker = ["uharfbuzz (>=0.23.0)"] symfont = ["sympy"] -type1 = ["xattr ; sys_platform == \"darwin\""] +type1 = ["xattr"] ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] -woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "freezegun" @@ -1227,6 +1310,7 @@ description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, {file = 
"freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, @@ -1242,7 +1326,7 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1345,6 +1429,7 @@ description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, @@ -1357,6 +1442,7 @@ description = "Google API client core library" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9"}, {file = "google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696"}, @@ -1366,12 +1452,12 @@ files = [ google-auth = ">=2.14.1,<3.0.0" googleapis-common-protos = ">=1.56.2,<2.0.0" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, ] proto-plus = ">=1.22.3,<2.0.0" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" @@ -1379,7 +1465,7 @@ requests = ">=2.18.0,<3.0.0" [package.extras] async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] @@ -1390,6 +1476,7 @@ description = "Google Authentication Library" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = 
"sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, @@ -1403,11 +1490,11 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] enterprise-cert = ["cryptography", "pyopenssl"] -pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] -pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +pyjwt = ["cryptography (<39.0.0)", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0)", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0)"] -testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0)", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] urllib3 = ["packaging", "urllib3"] [[package]] @@ -1417,6 +1504,7 @@ description = "Google Cloud Secret Manager API client library" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_cloud_secret_manager-2.23.3-py3-none-any.whl", hash = "sha256:fe06ebb2f71eb739ecc6c14ea9e8dafcb9bbc6123b78b2f8986ece6733d23a1a"}, {file = "google_cloud_secret_manager-2.23.3.tar.gz", hash = "sha256:598c4c0a9d10d49d500eb4aea3255dff250aa2f92c028f5c97e3b367f768c808"}, @@ -1436,6 +1524,7 @@ description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, @@ -1448,6 +1537,50 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0)"] +[[package]] +name = "gql" +version = "3.5.2" +description = "GraphQL client for Python" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "gql-3.5.2-py2.py3-none-any.whl", hash = "sha256:c830ffc38b3997b2a146317b27758305ab3d0da3bde607b49f34e32affb23ba2"}, + {file = "gql-3.5.2.tar.gz", hash = "sha256:07e1325b820c8ba9478e95de27ce9f23250486e7e79113dbb7659a442dc13e74"}, +] + +[package.dependencies] +anyio = ">=3.0,<5" +backoff = ">=1.11.1,<3.0" +graphql-core = ">=3.2,<3.2.5" +httpx = {version = ">=0.23.1,<1", optional = true, markers = "extra == \"httpx\""} +yarl = ">=1.6,<2.0" + 
+[package.extras] +aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] +all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] +botocore = ["botocore (>=1.21,<2)"] +dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] +httpx = ["httpx (>=0.23.1,<1)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] +test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] +test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)"] +websockets = ["websockets (>=10,<12)"] + +[[package]] +name = "graphql-core" +version = "3.2.4" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"}, + {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, +] + [[package]] name = "greenlet" version = "3.1.1" @@ -1455,81 +1588,39 @@ description = "Lightweight in-process concurrent programming" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"vector-db-based\" or extra == \"sql\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +markers = "(extra == \"vector-db-based\" or extra == \"sql\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -1543,6 +1634,7 @@ description = "IAM API client library" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351"}, {file = "grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20"}, @@ -1560,6 +1652,7 @@ description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, @@ -1624,6 +1717,7 @@ description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = 
"sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, @@ -1638,9 +1732,10 @@ protobuf = ">=4.21.6" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = true +optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1650,9 +1745,10 @@ files = [ name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -1672,9 +1768,10 @@ trio = ["trio (>=0.22.0,<1.0)"] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -1687,7 +1784,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1700,6 +1797,7 @@ description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1712,10 +1810,10 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, @@ -1727,7 +1825,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; 
platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -1736,6 +1834,7 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1748,6 +1847,7 @@ description = "An ISO 8601 date/time/duration parser and formatter" optional = false python-versions = "*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, @@ -1763,6 +1863,7 @@ description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -1781,6 +1882,7 @@ description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -1793,6 +1895,7 @@ description = "Apply JSON-Patches (RFC 6902)" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, @@ -1808,6 +1911,7 @@ description = "Identify specific nodes in a JSON document (RFC 6901)" optional = true python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -1820,6 +1924,7 @@ description = "An implementation of JSON Reference for Python" optional = false python-versions = "*" groups = ["main"] +markers = "python_version <= 
\"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, @@ -1832,6 +1937,7 @@ description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, @@ -1852,7 +1958,7 @@ description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -1943,7 +2049,7 @@ description = "Building applications with LLMs through composability" optional = true python-versions = "<4.0,>=3.8.1" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "langchain-0.1.16-py3-none-any.whl", hash = "sha256:bc074cc5e51fad79b9ead1572fc3161918d0f614a6c8f0460543d505ad249ac7"}, {file = "langchain-0.1.16.tar.gz", hash = "sha256:b6bce78f8c071baa898884accfff15c3d81da2f0dd86c20e2f4c80b41463f49f"}, @@ -1972,11 +2078,11 @@ cli = ["typer (>=0.9.0,<0.10.0)"] cohere = ["cohere (>=4,<6)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello 
(>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0) ; python_full_version >= \"3.8.1\" and python_version < \"3.12\"", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0) ; python_full_version >= \"3.8.1\" and python_full_version != \"3.9.7\" and python_version < \"4.0\"", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] -openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0) ; python_version >= \"3.9\""] -qdrant = ["qdrant-client (>=1.3.1,<2.0.0) ; python_full_version >= \"3.8.1\" and python_version < \"3.12\""] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] text-helpers = ["chardet (>=5.1.0,<6.0.0)"] [[package]] @@ -1986,7 +2092,7 @@ description = "Community contributed LangChain integrations." 
optional = true python-versions = "<4.0,>=3.8.1" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "langchain_community-0.0.32-py3-none-any.whl", hash = "sha256:406977009999952d0705de3806de2b4867e9bb8eda8ca154a59c7a8ed58da38d"}, {file = "langchain_community-0.0.32.tar.gz", hash = "sha256:1510217d646c8380f54e9850351f6d2a0b0dd73c501b666c6f4b40baa8160b29"}, @@ -2005,7 +2111,7 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0) ; python_full_version >= \"3.8.1\" and python_version < \"3.12\"", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0) ; python_full_version >= \"3.8.1\" and python_full_version != \"3.9.7\" and python_version < \"4.0\"", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", 
"bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] [[package]] name = "langchain-core" @@ -2014,6 +2120,7 @@ description = "Building applications with LLMs through composability" optional = true python-versions = "<4.0,>=3.8.1" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, @@ -2037,7 +2144,7 @@ description = "LangChain text splitting utilities" optional = true python-versions = "<4.0,>=3.8.1" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"}, {file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"}, @@ -2056,7 +2163,7 @@ description = "Language detection library ported from Google's language-detectio optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version 
>= \"3.12\") and extra == \"file-based\"" files = [ {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, @@ -2072,6 +2179,7 @@ description = "Client library to connect to the LangSmith LLM Tracing and Evalua optional = true python-versions = "<4.0,>=3.8.1" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15"}, {file = "langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a"}, @@ -2097,6 +2205,7 @@ description = "Links recognition library with FULL unicode support." optional = false python-versions = ">=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, @@ -2118,7 +2227,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, @@ -2274,7 +2383,7 @@ description = "Python implementation of John Gruber's Markdown." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, @@ -2291,6 +2400,7 @@ description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -2318,6 +2428,7 @@ description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -2389,7 +2500,7 @@ description = "A lightweight library for converting complex datatypes to and fro optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"file-based\"" +markers = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "marshmallow-3.25.1-py3-none-any.whl", hash = "sha256:ec5d00d873ce473b7f2ffcb7104286a376c354cab0c2fa12f5573dab03e87210"}, {file = "marshmallow-3.25.1.tar.gz", hash = "sha256:f4debda3bb11153d81ac34b0d582bf23053055ee11e791b54b4b35493468040a"}, @@ -2410,7 +2521,7 @@ description = "Python plotting package" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, @@ -2469,6 +2580,7 @@ description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -2481,6 +2593,7 @@ description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, @@ -2501,6 +2614,7 @@ description = "Markdown URL utilities" optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -2513,6 +2627,7 @@ description = "A memory profiler for Python applications" optional = false python-versions = ">=3.7.0" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "memray-1.15.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9b623c0c651d611dd068236566a8a202250e3d59307c3a3f241acc47835e73eb"}, {file = "memray-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74765f92887b7eed152e3b9f14c147c43bf0247417b18c7ea0dec173cd01633c"}, @@ -2566,19 +2681,19 @@ 
textual = ">=0.41.0" [package.extras] benchmark = ["asv"] -dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet ; python_version < \"3.14\"", "ipython", "isort", "mypy", "packaging", "pytest", "pytest-cov", "pytest-textual-snapshot", "setuptools ; python_version >= \"3.12\"", "sphinx", "sphinx-argparse", "textual (>=0.43,!=0.65.2,!=0.66)", "towncrier"] +dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet", "ipython", "isort", "mypy", "packaging", "pytest", "pytest-cov", "pytest-textual-snapshot", "setuptools", "sphinx", "sphinx-argparse", "textual (>=0.43,!=0.65.2,!=0.66)", "towncrier"] docs = ["IPython", "bump2version", "furo", "sphinx", "sphinx-argparse", "towncrier"] lint = ["black", "check-manifest", "flake8", "isort", "mypy"] -test = ["Cython", "greenlet ; python_version < \"3.14\"", "ipython", "packaging", "pytest", "pytest-cov", "pytest-textual-snapshot", "setuptools ; python_version >= \"3.12\"", "textual (>=0.43,!=0.65.2,!=0.66)"] +test = ["Cython", "greenlet", "ipython", "packaging", "pytest", "pytest-cov", "pytest-textual-snapshot", "setuptools", "textual (>=0.43,!=0.65.2,!=0.66)"] [[package]] name = "multidict" version = "6.1.0" description = "multidict implementation" -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2684,6 +2799,7 @@ description = "Optional static typing for Python" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, @@ -2748,7 +2864,7 @@ files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -markers = {main = "extra == \"vector-db-based\" or extra == \"file-based\""} +markers = {main = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "nltk" @@ -2757,6 +2873,7 @@ description = "Natural Language Toolkit" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, @@ -2783,6 +2900,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" 
files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -2829,7 +2947,7 @@ description = "Python client library for the OpenAI API" optional = true python-versions = ">=3.7.1" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, @@ -2862,7 +2980,7 @@ description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, @@ -2871,6 +2989,112 @@ files = [ [package.dependencies] et-xmlfile = "*" +[[package]] +name = "opentelemetry-api" +version = "1.32.1" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_api-1.32.1-py3-none-any.whl", hash = "sha256:bbd19f14ab9f15f0e85e43e6a958aa4cb1f36870ee62b7fd205783a112012724"}, + {file = "opentelemetry_api-1.32.1.tar.gz", hash = "sha256:a5be71591694a4d9195caf6776b055aa702e964d961051a0715d05f8632c32fb"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<8.7.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.32.1" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.32.1-py3-none-any.whl", hash = "sha256:a1e9ad3d0d9a9405c7ff8cdb54ba9b265da16da9844fe36b8c9661114b56c5d9"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.32.1.tar.gz", hash = "sha256:da4edee4f24aaef109bfe924efad3a98a2e27c91278115505b298ee61da5d68e"}, +] + +[package.dependencies] +opentelemetry-proto = "1.32.1" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.32.1" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.32.1-py3-none-any.whl", hash = "sha256:3cc048b0c295aa2cbafb883feaf217c7525b396567eeeabb5459affb08b7fefe"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.32.1.tar.gz", hash = "sha256:f854a6e7128858213850dbf1929478a802faf50e799ffd2eb4d7424390023828"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.32.1" +opentelemetry-proto = "1.32.1" 
+opentelemetry-sdk = ">=1.32.1,<1.33.0" +requests = ">=2.7,<3.0" + +[[package]] +name = "opentelemetry-proto" +version = "1.32.1" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_proto-1.32.1-py3-none-any.whl", hash = "sha256:fe56df31033ab0c40af7525f8bf4c487313377bbcfdf94184b701a8ccebc800e"}, + {file = "opentelemetry_proto-1.32.1.tar.gz", hash = "sha256:bc6385ccf87768f029371535312071a2d09e6c9ebf119ac17dbc825a6a56ba53"}, +] + +[package.dependencies] +protobuf = ">=5.0,<6.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.32.1" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_sdk-1.32.1-py3-none-any.whl", hash = "sha256:bba37b70a08038613247bc42beee5a81b0ddca422c7d7f1b097b32bf1c7e2f17"}, + {file = "opentelemetry_sdk-1.32.1.tar.gz", hash = "sha256:8ef373d490961848f525255a42b193430a0637e064dd132fd2a014d94792a092"}, +] + +[package.dependencies] +opentelemetry-api = "1.32.1" +opentelemetry-semantic-conventions = "0.53b1" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.53b1" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_semantic_conventions-0.53b1-py3-none-any.whl", hash = "sha256:21df3ed13f035f8f3ea42d07cbebae37020367a53b47f1ebee3b10a381a00208"}, + {file = "opentelemetry_semantic_conventions-0.53b1.tar.gz", hash = "sha256:4c5a6fede9de61211b2e9fc1e02e8acacce882204cd770177342b6a3be682992"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.32.1" + [[package]] name = "orjson" version = "3.10.15" @@ -2878,6 +3102,7 @@ description = "Fast, correct Python JSON library supporting dataclasses, datetim optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -2967,6 +3192,7 @@ description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, @@ -2979,6 +3205,7 @@ description = "Powerful data structures for data analysis, time series, and stat optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, @@ -3013,9 +3240,9 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -3053,7 +3280,7 @@ description = "Type annotations for pandas" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "pandas_stubs-2.2.3.241126-py3-none-any.whl", hash = "sha256:74aa79c167af374fe97068acc90776c0ebec5266a6e5c69fe11e9c2cf51f2267"}, {file = "pandas_stubs-2.2.3.241126.tar.gz", hash = "sha256:cf819383c6d9ae7d4dabf34cd47e1e45525bb2f312e6ad2939c2c204cb708acd"}, @@ -3070,6 +3297,7 @@ description = "Bring colors to your terminal." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, @@ -3082,7 +3310,7 @@ description = "A wrapper around the pdftoppm and pdftocairo command line tools t optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, @@ -3098,7 +3326,7 @@ description = "PDF parser and analyzer" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, @@ -3120,6 +3348,7 @@ description = "API Documentation for Python Projects" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pdoc-15.0.1-py3-none-any.whl", hash = "sha256:fd437ab8eb55f9b942226af7865a3801e2fb731665199b74fd9a44737dbe20f9"}, {file = "pdoc-15.0.1.tar.gz", hash = "sha256:3b08382c9d312243ee6c2a1813d0ff517a6ab84d596fa2c6c6b5255b17c3d666"}, @@ -3137,7 +3366,7 @@ description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"file-based\"" +markers = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -3217,7 +3446,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions ; python_version < \"3.10\""] +typing = ["typing-extensions"] xmp = ["defusedxml"] [[package]] @@ -3227,6 +3456,7 @@ description = "A small Python package for determining appropriate platform-speci optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -3244,7 +3474,7 @@ description = "An open-source, interactive data visualization library for Python optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, @@ -3261,6 +3491,7 @@ description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -3277,6 +3508,7 @@ description = "A task runner that works well with poetry." 
optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "poethepoet-0.24.4-py3-none-any.whl", hash = "sha256:fb4ea35d7f40fe2081ea917d2e4102e2310fda2cde78974050ca83896e229075"}, {file = "poethepoet-0.24.4.tar.gz", hash = "sha256:ff4220843a87c888cbcb5312c8905214701d0af60ac7271795baa8369b428fef"}, @@ -3293,10 +3525,10 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "propcache" version = "0.2.1" description = "Accelerated property cache" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -3389,6 +3621,7 @@ description = "Beautiful, Pythonic protocol buffers" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, @@ -3402,21 +3635,24 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "6.30.2" +version = "5.29.4" description = "" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "protobuf-6.30.2-cp310-abi3-win32.whl", hash = "sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103"}, - {file = "protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9"}, - {file = "protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b"}, - {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815"}, - {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d"}, - {file = "protobuf-6.30.2-cp39-cp39-win32.whl", hash = "sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b"}, - {file = "protobuf-6.30.2-cp39-cp39-win_amd64.whl", hash = "sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2"}, - {file = "protobuf-6.30.2-py3-none-any.whl", hash = "sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51"}, - {file = "protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048"}, + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, ] [[package]] @@ -3426,6 +3662,7 @@ description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, @@ -3457,7 +3694,7 @@ description = "Python library for Apache Arrow" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69"}, {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec"}, @@ -3513,6 +3750,7 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -3525,6 +3763,7 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, @@ -3540,6 +3779,7 @@ description = "Python style guide checker" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = 
"sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -3552,7 +3792,7 @@ description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -3565,6 +3805,7 @@ description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, @@ -3577,7 +3818,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" @@ -3586,6 +3827,7 @@ description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -3699,6 +3941,7 @@ description = "passive checker of Python programs" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, @@ -3711,6 +3954,7 @@ description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -3726,6 +3970,7 @@ description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -3744,6 +3989,7 @@ description = "A development tool to measure, monitor and analyze the memory beh optional = false python-versions = ">=3.6" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Pympler-1.1-py3-none-any.whl", hash = "sha256:5b223d6027d0619584116a0cbc28e8d2e378f7a79c1e5e024f9ff3b673c58506"}, {file = "pympler-1.1.tar.gz", hash = "sha256:1eaa867cb8992c218430f1708fdaccda53df064144d1c5656b1e6f1ee6000424"}, @@ -3759,7 +4005,7 @@ description = "pyparsing module - Classes and methods to define and execute pars optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -3775,6 +4021,7 @@ description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connec optional = false python-versions = ">=3.8.1" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyproject_flake8-6.1.0-py3-none-any.whl", hash = "sha256:86ea5559263c098e1aa4f866776aa2cf45362fd91a576b9fd8fbbbb55db12c4e"}, {file = "pyproject_flake8-6.1.0.tar.gz", hash = "sha256:6da8e5a264395e0148bc11844c6fb50546f1fac83ac9210f7328664135f9e70f"}, @@ -3791,6 +4038,7 @@ description = "Python Rate-Limiter using Leaky-Bucket Algorithm" optional = false python-versions = ">=3.8,<4.0" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, @@ -3807,6 +4055,7 @@ description = "Persistent/Functional/Immutable data structures" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, @@ -3849,7 +4098,7 @@ description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" optional = true 
python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "pytesseract-0.3.10-py3-none-any.whl", hash = "sha256:8f22cc98f765bf13517ead0c70effedb46c153540d25783e04014f28b55a5fc6"}, {file = "pytesseract-0.3.10.tar.gz", hash = "sha256:f1c3a8b0f07fd01a1085d451f5b8315be6eec1d5577a6796d46dc7a62bd4120f"}, @@ -3866,6 +4115,7 @@ description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -3889,6 +4139,7 @@ description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -3908,6 +4159,7 @@ description = "pytest-httpserver is a httpserver for pytest" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_httpserver-1.1.1-py3-none-any.whl", hash = "sha256:aadc744bfac773a2ea93d05c2ef51fa23c087e3cc5dace3ea9d45cdd4bfe1fe8"}, {file = "pytest_httpserver-1.1.1.tar.gz", hash = "sha256:e5c46c62c0aa65e5d4331228cb2cb7db846c36e429c3e74ca806f284806bf7c6"}, @@ -3923,6 +4175,7 @@ description = "A simple plugin to use with pytest" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_memray-1.7.0-py3-none-any.whl", hash = "sha256:b896718c1adf6d0cd339dfaaaa5620f035c9919e1199a79b3453804a1254306f"}, {file = "pytest_memray-1.7.0.tar.gz", hash = "sha256:c18fa907d2210b42f4096c093e2d3416dfc002dcaa450ef3f9ba819bc3dd8f5f"}, @@ -3944,6 +4197,7 @@ description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -3962,7 +4216,7 @@ description = "Python binding for Rust's library for reading excel and odf file optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python_calamine-0.2.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f292a03591b1cab1537424851b74baa33b0a55affc315248a7592ba3de1c3e83"}, {file = "python_calamine-0.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6cfbd23d1147f53fd70fddfb38af2a98896ecad069c9a4120e77358a6fc43b39"}, @@ -4073,6 +4327,7 @@ description = "Extensions to the standard Python datetime module" optional = false python-versions = 
"!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -4088,7 +4343,7 @@ description = "Create, read, and update Microsoft Word .docx files." optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, @@ -4105,7 +4360,7 @@ description = "ISO 639 language codes, names, and other associated information" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python_iso639-2024.10.22-py3-none-any.whl", hash = "sha256:02d3ce2e01c6896b30b9cbbd3e1c8ee0d7221250b5d63ea9803e0d2a81fd1047"}, {file = "python_iso639-2024.10.22.tar.gz", hash = "sha256:750f21b6a0bc6baa24253a3d8aae92b582bf93aa40988361cd96852c2c6d9a52"}, @@ -4121,7 +4376,7 @@ description = "File type identification using libmagic" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, @@ -4134,7 +4389,7 @@ description = "Generate and manipulate Open XML PowerPoint (.pptx) files" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, ] @@ -4151,7 +4406,7 @@ description = "Python library for the snappy compression library from Google" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "python_snappy-0.7.3-py3-none-any.whl", hash = "sha256:074c0636cfcd97e7251330f428064050ac81a52c62ed884fc2ddebbb60ed7f50"}, {file = "python_snappy-0.7.3.tar.gz", hash = "sha256:40216c1badfb2d38ac781ecb162a1d0ec40f8ee9747e610bcfefdfa79486cee3"}, @@ -4167,6 +4422,7 @@ description = "Universally unique lexicographically sortable identifier" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = 
"sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -4182,6 +4438,7 @@ description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -4194,7 +4451,7 @@ description = "Python for Window Extensions" optional = false python-versions = "*" groups = ["dev"] -markers = "platform_system == \"Windows\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -4223,6 +4480,7 @@ description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -4286,6 +4544,7 @@ description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb8a54543d16ab1b69e2c5ed96cabbff16db044a50eddfc028000138ca9ddf33"}, {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231c8b2efbd7f8d2ecd1ae900363ba168b8870644bb8f2b5aa96e4a7573bde19"}, @@ -4387,6 +4646,7 @@ description = "Alternative regular expression module, to replace re." optional = true python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -4491,6 +4751,7 @@ description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -4513,6 +4774,7 @@ description = "A persistent cache for python requests" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, @@ -4544,6 +4806,7 @@ description = "Mock out responses from the requests package" optional = false python-versions = ">=3.5" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, @@ -4562,6 +4825,7 @@ description = "A utility belt for advanced users of python-requests" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -4577,6 +4841,7 @@ description = "This is a small Python module for parsing Pip requirement files." 
optional = false python-versions = "<4.0,>=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requirements_parser-0.11.0-py3-none-any.whl", hash = "sha256:50379eb50311834386c2568263ae5225d7b9d0867fb55cf4ecc93959de2c2684"}, {file = "requirements_parser-0.11.0.tar.gz", hash = "sha256:35f36dc969d14830bf459803da84f314dc3d17c802592e9e970f63d0359e5920"}, @@ -4593,6 +4858,7 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar optional = false python-versions = ">=3.8.0" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -4613,6 +4879,7 @@ description = "Format click help output nicely with rich" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich_click-1.8.8-py3-none-any.whl", hash = "sha256:205aabd5a98e64ab2c105dee9e368be27480ba004c7dfa2accd0ed44f9f1550e"}, {file = "rich_click-1.8.8.tar.gz", hash = "sha256:547c618dea916620af05d4a6456da797fbde904c97901f44d2f32f89d85d6c84"}, @@ -4634,6 +4901,7 @@ description = "Pure-Python RSA implementation" optional = false python-versions = "<4,>=3.6" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, @@ -4649,6 +4917,7 @@ description = "An extremely fast Python linter and code formatter, written in Ru optional = false python-versions = ">=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, @@ -4677,7 +4946,7 @@ description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -4733,7 +5002,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "scipy-1.15.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:c64ded12dcab08afff9e805a67ff4480f5e69993310e093434b10e85dc9d43e1"}, {file = "scipy-1.15.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5b190b935e7db569960b48840e5bef71dc513314cc4e79a1b7d14664f57fd4ff"}, @@ -4783,7 +5052,7 
@@ numpy = ">=1.23.5,<2.5" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "serpyco-rs" @@ -4792,6 +5061,7 @@ description = "" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "serpyco_rs-1.13.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e722b3053e627d8a304e462bce20cae1670a2c4b0ef875b84d0de0081bec4029"}, {file = "serpyco_rs-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f10e89c752ff78d720a42e026b0a9ada70717ad6306a9356f794280167d62bf"}, @@ -4847,19 +5117,20 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "setuptools-76.0.0-py3-none-any.whl", hash = "sha256:199466a166ff664970d0ee145839f5582cb9bca7a0a3a2e795b6a9cb2308e9c6"}, {file = "setuptools-76.0.0.tar.gz", hash = "sha256:43b4ee60e10b0d0ee98ad11918e114c70701bc6051662a9a675a0496c1a158f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", 
"pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -4868,6 +5139,7 @@ description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -4877,9 +5149,10 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -optional = true +optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -4892,7 +5165,7 @@ description = "A modern CSS selector implementation for Beautiful Soup." 
optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -4905,7 +5178,7 @@ description = "Database Abstraction Library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"sql\"" +markers = "(extra == \"vector-db-based\" or extra == \"sql\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, @@ -5002,7 +5275,7 @@ description = "Pretty-print tabular data" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -5018,6 +5291,7 @@ description = "Retry code until it succeeds" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, @@ -5034,6 +5308,7 @@ description = "Modern Text User Interface framework" optional = false python-versions = "<4.0.0,>=3.8.1" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "textual-1.0.0-py3-none-any.whl", hash = "sha256:2d4a701781c05104925e463ae370c630567c70c2880e92ab838052e3e23c986f"}, {file = "textual-1.0.0.tar.gz", hash = "sha256:bec9fe63547c1c552569d1b75d309038b7d456c03f86dfa3706ddb099b151399"}, @@ -5046,7 +5321,7 @@ rich = ">=13.3.3" typing-extensions = ">=4.4.0,<5.0.0" [package.extras] -syntax = ["tree-sitter (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-bash (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-css (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-go (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-html (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-java (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-javascript (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-json (>=0.24.0) ; python_version >= \"3.9\"", "tree-sitter-markdown (>=0.3.0) ; python_version >= \"3.9\"", "tree-sitter-python (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-regex (>=0.24.0) ; python_version >= \"3.9\"", "tree-sitter-rust (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-sql (>=0.3.0) ; python_version >= \"3.9\"", "tree-sitter-toml (>=0.6.0) ; python_version >= \"3.9\"", "tree-sitter-xml (>=0.7.0) ; python_version >= \"3.9\"", "tree-sitter-yaml (>=0.6.0) ; python_version >= \"3.9\""] +syntax = 
["tree-sitter (>=0.23.0)", "tree-sitter-bash (>=0.23.0)", "tree-sitter-css (>=0.23.0)", "tree-sitter-go (>=0.23.0)", "tree-sitter-html (>=0.23.0)", "tree-sitter-java (>=0.23.0)", "tree-sitter-javascript (>=0.23.0)", "tree-sitter-json (>=0.24.0)", "tree-sitter-markdown (>=0.3.0)", "tree-sitter-python (>=0.23.0)", "tree-sitter-regex (>=0.24.0)", "tree-sitter-rust (>=0.23.0)", "tree-sitter-sql (>=0.3.0)", "tree-sitter-toml (>=0.6.0)", "tree-sitter-xml (>=0.7.0)", "tree-sitter-yaml (>=0.6.0)"] [[package]] name = "threadpoolctl" @@ -5055,7 +5330,7 @@ description = "threadpoolctl" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -5068,7 +5343,7 @@ description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"}, {file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"}, @@ -5151,7 +5426,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -markers = {main = "python_version == \"3.10\""} +markers = {main = "python_version < \"3.11\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "tqdm" @@ -5160,6 +5435,7 @@ description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -5182,6 +5458,7 @@ description = "Typing stubs for cachetools" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0"}, {file = "types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2"}, @@ -5194,6 +5471,7 @@ description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -5206,7 
+5484,7 @@ description = "Typing stubs for pytz" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "types_pytz-2024.2.0.20241221-py3-none-any.whl", hash = "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5"}, {file = "types_pytz-2024.2.0.20241221.tar.gz", hash = "sha256:06d7cde9613e9f7504766a0554a270c369434b50e00975b3a4a0f6eed0f2c1a9"}, @@ -5219,6 +5497,7 @@ description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -5231,6 +5510,7 @@ description = "Typing stubs for requests" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -5246,6 +5526,7 @@ description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_setuptools-75.8.2.20250305-py3-none-any.whl", hash = "sha256:ba80953fd1f5f49e552285c024f75b5223096a38a5138a54d18ddd3fa8f6a2d4"}, {file = "types_setuptools-75.8.2.20250305.tar.gz", hash = "sha256:a987269b49488f21961a1d99aa8d281b611625883def6392a93855b31544e405"}, @@ -5256,14 +5537,15 @@ setuptools = "*" [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] @@ -5273,7 +5555,7 @@ description = "Runtime inspection utilities for typing module." 
optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"vector-db-based\" or extra == \"file-based\"" +markers = "(extra == \"vector-db-based\" or extra == \"file-based\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -5290,6 +5572,7 @@ description = "Provider of IANA time zone data" optional = false python-versions = ">=2" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -5302,6 +5585,7 @@ description = "Micro subset of unicode data files for linkify-it-py projects." optional = false python-versions = ">=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, @@ -5317,7 +5601,7 @@ description = "A library that prepares raw documents for downstream ML tasks." optional = true python-versions = ">=3.7.0" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "unstructured-0.10.27-py3-none-any.whl", hash = "sha256:3a8a8e44302388ddc39c184059e8b4458f1cdc58032540b9af7d85f6c3eca3be"}, {file = "unstructured-0.10.27.tar.gz", hash = "sha256:f567b5c4385993a9ab48db5563dd7b413aac4f2002bb22e6250496ea8f440f5e"}, @@ -5399,7 +5683,7 @@ description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "unstructured.pytesseract-0.3.13-py3-none-any.whl", hash = "sha256:8001bc860470d56185176eb3ceb4623e888eba058ca3b30af79003784bc40e19"}, {file = "unstructured.pytesseract-0.3.13.tar.gz", hash = "sha256:ff2e6391496e457dbf4b4e327f4a4577cce18921ea6570dc74bd64381b10e963"}, @@ -5416,6 +5700,7 @@ description = "URL normalization for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, @@ -5431,13 +5716,14 @@ description = "HTTP library with thread-safe connection pooling, file post, and optional = false python-versions = ">=3.9" groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = 
"sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -5449,6 +5735,7 @@ description = "Wildcard/glob file name matcher." optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcmatch-10.0-py3-none-any.whl", hash = "sha256:0dd927072d03c0a6527a20d2e6ad5ba8d0380e60870c383bc533b71744df7b7a"}, {file = "wcmatch-10.0.tar.gz", hash = "sha256:e72f0de09bba6a04e0de70937b0cf06e55f36f37b3deb422dfaf854b867b840a"}, @@ -5464,6 +5751,7 @@ description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -5482,6 +5770,7 @@ description = "Modern datetime library for Python" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "whenever-0.6.16-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:901783ba877b5d73ce5b1bc1697c6097a9ac14c43064788b24ec7dc75a85a90a"}, {file = "whenever-0.6.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d374cd750ea68adb4ad69d52aef3838eda38ae63183c6135b122772ac053c66"}, @@ -5559,6 +5848,96 @@ files = [ [package.dependencies] tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} +[[package]] +name = "wrapt" +version = "1.17.2" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = 
"wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, 
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, +] + [[package]] name = "xlsxwriter" version = "3.2.0" @@ -5566,7 +5945,7 @@ description = "A Python module for creating Excel XLSX files." 
optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"file-based\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"file-based\"" files = [ {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, @@ -5579,6 +5958,7 @@ description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -5588,10 +5968,10 @@ files = [ name = "yarl" version = "1.18.3" description = "Yet another URL library" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -5686,21 +6066,21 @@ propcache = ">=0.2.0" name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] @@ -5712,4 +6092,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "3462b02e03e9f7edd9dec23acde60ad4f7128ace08ce1787556aa0b4bcd25a86" +content-hash = "cde25949898eaef637105fd376a030b96b30b197e819dff2d015e03479c3b4ac" diff --git a/pyproject.toml b/pyproject.toml index 74d2f6d56..24d4696ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,8 @@ sqlalchemy = {version = "^2.0,!=2.0.36", optional = true } xmltodict = ">=0.13,<0.15" anyascii = "^0.3.2" whenever = "^0.6.16" +dagger-io = "^0.18.6" +anyio = "<4.0.0" [tool.poetry.group.dev.dependencies] freezegun = "*" From 
d3287a1bd1f8cf72955edc6ca2b99731827d3161 Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 16:27:51 -0700 Subject: [PATCH 16/56] add declarative component schema --- .../declarative_component_schema.yaml | 180 +++++++++++++ .../models/declarative_component_schema.py | 255 +++++++++++++----- .../parsers/model_to_component_factory.py | 3 + airbyte_cdk/sources/declarative/spec/spec.py | 59 +++- 4 files changed, 436 insertions(+), 61 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index ca595d92e..89ecc86eb 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -3803,6 +3803,33 @@ definitions: title: Advanced Auth description: Advanced specification for configuring the authentication flow. "$ref": "#/definitions/AuthFlow" + config_normalization_rules: + title: Config Normalization Rules + type: object + additional_properties: false + properties: + config_migrations: + title: Config Migrations + description: The config will be migrated according to these transformations and updated within the platform for subsequent syncs. + type: array + items: + anyOf: + - "$ref": '#/definitions/ConfigTransformations/RemapField' + transformations: + title: Transformations + description: The list of transformations that will be applied on the incoming config at the start of a sync. + type: array + items: + anyOf: + - "$ref": '#/definitions/ConfigTransformations/RemapField' + validations: + title: Validations + description: The list of validations that will be performed on the incoming config before starting a sync. + type: array + items: + anyOf: + - "$ref": "#/definitions/DpathValidator" + - "$ref": "#/definitions/PredicateValidator" SubstreamPartitionRouter: title: Substream Partition Router description: Partition router that is used to retrieve records that have been partitioned according to records from the specified parent streams. An example of a parent stream is automobile brands and the substream would be the various car models associated with each branch. @@ -4164,6 +4191,159 @@ definitions: description: The GraphQL query to be executed default: {} additionalProperties: true + DpathValidator: + title: Dpath Validator + description: Validator that extracts the value located at a given field path. + type: object + required: + - type + - field_path + - validation_strategy + properties: + type: + type: string + enum: [ DpathValidation ] + field_path: + title: Field Path + description: List of potentially nested fields describing the full path of the field to validate. Use "*" to validate all values from an array. + type: array + items: + type: string + interpolation_context: + - config + examples: + - [ "data" ] + - [ "data", "records" ] + - [ "data", "{{ parameters.name }}" ] + - [ "data", "*", "record" ] + validation_strategy: + title: Validation Strategy + description: The condition that the specified config value will be evaluated against. + anyOf: + - "$ref": "#/definitions/ValidateAdheresToSchema" + PredicateValidator: + title: Predicate Validator + description: Validator that applies a validation strategy to a specified value. + type: object + required: + - type + - value + - validation_strategy + properties: + type: + type: string + enum: [PredicateValidator] + value: + title: Value + description: The value to be validated. Can be a literal value or interpolated from configuration.
+ type: + - string + - number + - object + - array + - boolean + - "null" + interpolation_context: + - config + examples: + - "test-value" + - "{{ config['api_version'] }}" + - "{{ config['tenant_id'] }}" + - 123 + validation_strategy: + title: Validation Strategy + description: The validation strategy to apply to the value. + anyOf: + - "$ref": "#/definitions/ValidateAdheresToSchema" + ValidateAdheresToSchema: + title: Validate Adheres To Schema + description: Validates that a user-provided schema adheres to a specified JSON schema. + type: object + required: + - type + - schema + properties: + type: + type: string + enum: [ValidateAdheresToSchema] + schema: + title: JSON Schema + description: The JSON schema used for validation. + type: + - string + - object + interpolation_context: + - config + examples: + - "{{ config['report_validation_schema'] }}" + - | + '{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The person's name" + }, + "age": { + "type": "integer", + "minimum": 0, + "description": "The person's age" + } + }, + "required": ["name", "age"] + }' + - $schema: "http://json-schema.org/draft-07/schema#" + title: Person + type: object + properties: + name: + type: string + description: "The person's name" + age: + type: integer + minimum: 0 + description: "The person's age" + required: + - name + - age + ConfigTransformations: + RemapField: + title: Remap Field + description: Transformation that remaps a field's value to another value based on a static map. + type: object + required: + - type + - map + - field_path + properties: + type: + type: string + enum: [RemapField] + map: + title: Value Mapping + description: A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value. + type: + - object + - string + additionalProperties: true + examples: + - pending: "in_progress" + done: "completed" + cancelled: "terminated" + - "{{ config['status_mapping'] }}" + field_path: + title: Field Path + description: The path to the field whose value should be remapped. Specified as a list of path components to navigate through nested objects. + type: array + items: + type: string + examples: + - ["status"] + - ["data", "status"] + - ["data", "{{ parameters.name }}", "status"] + - ["data", "*", "status"] interpolation: variables: - title: config diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 0aa9fa569..f3eb57f81 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1,5 +1,3 @@ -# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
- # generated by datamodel-codegen: # filename: declarative_component_schema.yaml @@ -620,7 +618,9 @@ class OAuthAuthenticator(BaseModel): scopes: Optional[List[str]] = Field( None, description="List of scopes that should be granted to the access token.", - examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], + examples=[ + ["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"] + ], title="Scopes", ) token_expiry_date: Optional[str] = Field( @@ -1126,24 +1126,28 @@ class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( - None, - description="OAuth specific blob. This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", - examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, - { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], - } - }, - ], - title="OAuth user input", + oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = ( + Field( + None, + description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", + examples=[ + {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + { + "app_id": { + "type": "string", + "path_in_connector_config": ["info", "app_id"], + } + }, + ], + title="OAuth user input", + ) ) - oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = Field( - None, - description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', - title="DeclarativeOAuth Connector Specification", + oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = ( + Field( + None, + description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The 
variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', + title="DeclarativeOAuth Connector Specification", + ) ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, @@ -1161,7 +1165,9 @@ class Config: complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], + examples=[ + {"client_id": {"type": "string"}, "client_secret": {"type": "string"}} + ], title="OAuth input specification", ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( @@ -1523,6 +1529,62 @@ class Config: query: Dict[str, Any] = Field(..., description="The GraphQL query to be executed") +class ValidateAdheresToSchema(BaseModel): + type: Literal["ValidateAdheresToSchema"] + schema_: Union[str, Dict[str, Any]] = Field( + ..., + alias="schema", + description="The JSON schema used for validation.", + examples=[ + "{{ config['report_validation_schema'] }}", + '\'{\n "$schema": "http://json-schema.org/draft-07/schema#",\n "title": "Person",\n "type": "object",\n "properties": {\n "name": {\n "type": "string",\n "description": "The person\'s name"\n },\n "age": {\n "type": "integer",\n "minimum": 0,\n "description": "The person\'s age"\n }\n },\n "required": ["name", "age"]\n}\'\n', + { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Person", + "type": "object", + "properties": { + "name": {"type": "string", "description": "The person's name"}, + "age": { + "type": "integer", + "minimum": 0, + "description": "The person's age", + }, + }, + "required": ["name", "age"], + }, + ], + title="JSON Schema", + ) + + +class ConfigTransformations(BaseModel): + __root__: Any + + +class RemapField(BaseModel): + type: Literal["RemapField"] + map: Union[Dict[str, Any], str] = Field( + ..., + description="A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value.", + examples=[ + {"pending": "in_progress", "done": "completed", "cancelled": "terminated"}, + "{{ config['status_mapping'] }}", + ], + title="Value Mapping", + ) + field_path: List[str] = Field( + ..., + description="The path to the field whose value should be remapped. 
Specified as a list of path components to navigate through nested objects.", + examples=[ + ["status"], + ["data", "status"], + ["data", "{{ parameters.name }}", "status"], + ["data", "*", "status"], + ], + title="Field Path", + ) + + class AddedFieldDefinition(BaseModel): type: Literal["AddedFieldDefinition"] path: List[str] = Field( @@ -1887,7 +1949,9 @@ class RecordSelector(BaseModel): description="Responsible for filtering records to be emitted by the Source.", title="Record Filter", ) - schema_normalization: Optional[Union[SchemaNormalization, CustomSchemaNormalization]] = Field( + schema_normalization: Optional[ + Union[SchemaNormalization, CustomSchemaNormalization] + ] = Field( None, description="Responsible for normalization according to the schema.", title="Schema Normalization", @@ -1905,29 +1969,49 @@ class GzipDecoder(BaseModel): decoder: Union[CsvDecoder, GzipDecoder, JsonDecoder, JsonlDecoder] -class Spec(BaseModel): - type: Literal["Spec"] - connection_specification: Dict[str, Any] = Field( +class RequestBodyGraphQL(BaseModel): + type: Literal["RequestBodyGraphQL"] + value: RequestBodyGraphQlQuery + + +class DpathValidator(BaseModel): + type: Literal["DpathValidation"] + field_path: List[str] = Field( ..., - description="A connection specification describing how a the connector can be configured.", - title="Connection Specification", - ) - documentation_url: Optional[str] = Field( - None, - description="URL of the connector's documentation page.", - examples=["https://docs.airbyte.com/integrations/sources/dremio"], - title="Documentation URL", + description='List of potentially nested fields describing the full path of the field to validate. Use "*" to validate all values from an array.', + examples=[ + ["data"], + ["data", "records"], + ["data", "{{ parameters.name }}"], + ["data", "*", "record"], + ], + title="Field Path", ) - advanced_auth: Optional[AuthFlow] = Field( - None, - description="Advanced specification for configuring the authentication flow.", - title="Advanced Auth", + validation_strategy: ValidateAdheresToSchema = Field( + ..., + description="The condition that the specified config value will be evaluated against", + title="Validation Stragey", ) -class RequestBodyGraphQL(BaseModel): - type: Literal["RequestBodyGraphQL"] - value: RequestBodyGraphQlQuery +class PredicateValidator(BaseModel): + type: Literal["PredicateValidator"] + value: Optional[Union[str, float, Dict[str, Any], List[Any], bool]] = Field( + ..., + description="The value to be validated. 
Can be a literal value or interpolated from configuration.", + examples=[ + "test-value", + "{{ config['api_version'] }}", + "{{ config['tenant_id'] }}", + 123, + ], + title="Value", + ) + validation_strategy: ValidateAdheresToSchema = Field( + ..., + description="The validation strategy to apply to the value.", + title="Validation Strategy", + ) class CompositeErrorHandler(BaseModel): @@ -1985,6 +2069,47 @@ class Config: ) +class ConfigNormalizationRules(BaseModel): + config_migrations: Optional[List[RemapField]] = Field( + None, + description="The config will be migrated according to these transformations and updated within the platform for subsequent syncs.", + title="Config Migrations", + ) + transformations: Optional[List[RemapField]] = Field( + None, + description="The list of transformations that will be applied on the incoming config at the start of a sync.", + title="Transformations", + ) + validations: Optional[List[Union[DpathValidator, PredicateValidator]]] = Field( + None, + description="The list of validations that will be performed on the incoming config before starting a sync", + title="Validations", + ) + + +class Spec(BaseModel): + type: Literal["Spec"] + connection_specification: Dict[str, Any] = Field( + ..., + description="A connection specification describing how a the connector can be configured.", + title="Connection Specification", + ) + documentation_url: Optional[str] = Field( + None, + description="URL of the connector's documentation page.", + examples=["https://docs.airbyte.com/integrations/sources/dremio"], + title="Documentation URL", + ) + advanced_auth: Optional[AuthFlow] = Field( + None, + description="Advanced specification for configuring the authentication flow.", + title="Advanced Auth", + ) + config_normalization_rules: Optional[ConfigNormalizationRules] = Field( + None, title="Config Normalization Rules" + ) + + class DeclarativeSource1(BaseModel): class Config: extra = Extra.forbid @@ -2133,7 +2258,9 @@ class Config: extra = Extra.allow type: Literal["DeclarativeStream"] - name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") + name: Optional[str] = Field( + "", description="The stream name.", example=["Users"], title="Name" + ) retriever: Union[SimpleRetriever, AsyncRetriever, CustomRetriever] = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages.", @@ -2164,7 +2291,7 @@ class Config: ] ] = Field( None, - description="Component used to retrieve the schema for the current stream.", + description="One or many schema loaders can be used to retrieve the schema for the current stream. When multiple schema loaders are defined, schema properties will be merged together. 
Schema loaders defined first taking precedence in the event of a conflict.", title="Schema Loader", ) transformations: Optional[ @@ -2311,18 +2438,20 @@ class HttpRequester(BaseModelWithDeprecations): description="Allows for retrieving a dynamic set of properties from an API endpoint which can be injected into outbound request using the stream_partition.extra_fields.", title="Fetch Properties from Endpoint", ) - request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = Field( - None, - description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", - examples=[ - {"unit": "day"}, - { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' - }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": "updated_at"}, - ], - title="Query Parameters", + request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = ( + Field( + None, + description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + examples=[ + {"unit": "day"}, + { + "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + }, + {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, + {"sort_by[asc]": "updated_at"}, + ], + title="Query Parameters", + ) ) request_headers: Optional[Union[Dict[str, str], str]] = Field( None, @@ -2512,7 +2641,9 @@ class QueryProperties(BaseModel): class StateDelegatingStream(BaseModel): type: Literal["StateDelegatingStream"] - name: str = Field(..., description="The stream name.", example=["Users"], title="Name") + name: str = Field( + ..., description="The stream name.", example=["Users"], title="Name" + ) full_refresh_stream: DeclarativeStream = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages when the state is empty or not provided.", @@ -2601,7 +2732,9 @@ class AsyncRetriever(BaseModel): ) download_extractor: Optional[ Union[DpathExtractor, CustomRecordExtractor, ResponseToFileExtractor] - ] = Field(None, description="Responsible for fetching the records from provided urls.") + ] = Field( + None, description="Responsible for fetching the records from provided urls." 
+ ) creation_requester: Union[HttpRequester, CustomRequester] = Field( ..., description="Requester component that describes how to prepare HTTP requests to send to the source API to create the async server-side job.", @@ -2741,10 +2874,12 @@ class DynamicDeclarativeStream(BaseModel): stream_template: DeclarativeStream = Field( ..., description="Reference to the stream template.", title="Stream Template" ) - components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = Field( - ..., - description="Component resolve and populates stream templates with components values.", - title="Components Resolver", + components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = ( + Field( + ..., + description="Component resolve and populates stream templates with components values.", + title="Components Resolver", + ) ) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 6883f6e70..e83ea1c49 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -3472,6 +3472,9 @@ def create_spec(model: SpecModel, config: Config, **kwargs: Any) -> Spec: documentation_url=model.documentation_url, advanced_auth=model.advanced_auth, parameters={}, + config_migrations=model.config_normalization_rules.config_migrations, + transformations=model.config_normalization_rules.transformations, + validations=model.config_normalization_rules.validations, ) def create_substream_partition_router( diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 914e99e93..220a503bc 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -3,14 +3,21 @@ # from dataclasses import InitVar, dataclass -from typing import Any, Mapping, Optional +import json +from typing import Any, List, Mapping, MutableMapping, Optional +from airbyte_cdk.config_observation import create_connector_config_control_message +from airbyte_cdk.entrypoint import AirbyteEntrypoint from airbyte_cdk.models import ( AdvancedAuth, ConnectorSpecification, ConnectorSpecificationSerializer, ) from airbyte_cdk.sources.declarative.models.declarative_component_schema import AuthFlow +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ConfigTransformation +from airbyte_cdk.sources.declarative.validators.validator import Validator +from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository +from airbyte_cdk.sources.source import Source @dataclass @@ -27,6 +34,10 @@ class Spec: parameters: InitVar[Mapping[str, Any]] documentation_url: Optional[str] = None advanced_auth: Optional[AuthFlow] = None + config_migrations: Optional[List[ConfigTransformation]] = None + config_transformations: Optional[List[ConfigTransformation]] = None + config_validations: Optional[List[Validator]] = None + message_repository: MessageRepository = InMemoryMessageRepository() def generate_spec(self) -> ConnectorSpecification: """ @@ -46,3 +57,49 @@ def generate_spec(self) -> ConnectorSpecification: # We remap these keys to camel case because that's the existing format expected by the rest of the platform return ConnectorSpecificationSerializer.load(obj) + + def migrate_config(self, args: List[str], source: Source, config: MutableMapping[str, Any]) -> None: + """ + Apply 
all specified config transformations to the provided config and save the modified config to the given path and emit a control message. + + :param args: Command line arguments + :param source: Source instance + :param config: The user-provided config to migrate + """ + config_path = AirbyteEntrypoint(source).extract_config(args) + + mutable_config = dict(config) + for transformation in self.config_migrations: + transformation.transform(mutable_config) + + if mutable_config != config: + with open(config_path, "w") as f: + json.dump(mutable_config, f) + self.message_repository.emit_message(create_connector_config_control_message(mutable_config)) + for message in self.message_repository.consume_queue(): + print(message.json(exclude_unset=True)) + + + def transform_config(self, config: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + """ + Apply all config transformations to the provided config. + + :param config: The user-provided configuration + :return: The transformed configuration + """ + mutable_config = dict(config) + + for transformation in self.config_transformations: + transformation.transform(mutable_config) + + return mutable_config + + + def validate_config(self, config: MutableMapping[str, Any]) -> None: + """ + Apply all config validations to the provided config. + + :param config: The user-provided configuration + """ + for validator in self.config_validations: + validator.validate(config) From f72efc0a4ad53002331b5d9b30d0640383fde34b Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 16:48:47 -0700 Subject: [PATCH 17/56] fix assertions --- .../declarative/validators/test_dpath_validator.py | 13 +++++++------ .../validators/test_predicate_validator.py | 6 +++--- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/unit_tests/sources/declarative/validators/test_dpath_validator.py b/unit_tests/sources/declarative/validators/test_dpath_validator.py index 9fcb51733..2d4373faa 100644 --- a/unit_tests/sources/declarative/validators/test_dpath_validator.py +++ b/unit_tests/sources/declarative/validators/test_dpath_validator.py @@ -40,8 +40,9 @@ def test_given_invalid_path_when_validate_then_raise_key_error(self): with pytest.raises(KeyError) as context: validator.validate(test_data) - assert "Error validating path" in str(context.exception) - assert not strategy.validate_called + + assert "Error validating path" in str(context.exception) + assert not strategy.validate_called def test_given_strategy_fails_when_validate_then_raise_value_error(self): error_message = "Invalid email format" @@ -53,10 +54,10 @@ def test_given_strategy_fails_when_validate_then_raise_value_error(self): with pytest.raises(ValueError) as context: validator.validate(test_data) - assert "Error validating value" in str(context.exception) - assert error_message in str(context.exception) - assert strategy.validate_called - assert strategy.validated_value == "invalid-email" + assert "Error validating value" in str(context.exception) + assert error_message in str(context.exception) + assert strategy.validate_called + assert strategy.validated_value == "invalid-email" def test_given_empty_path_list_when_validate_then_validate_raises_exception(self): strategy = MockValidationStrategy() diff --git a/unit_tests/sources/declarative/validators/test_predicate_validator.py b/unit_tests/sources/declarative/validators/test_predicate_validator.py index 8a1ab18c7..5ddd0a2b8 100644 --- a/unit_tests/sources/declarative/validators/test_predicate_validator.py +++ 
b/unit_tests/sources/declarative/validators/test_predicate_validator.py @@ -40,9 +40,9 @@ def test_given_invalid_input_when_validate_then_raise_value_error(self): with pytest.raises(ValueError) as context: validator.validate() - assert error_message in str(context.exception) - assert strategy.validate_called - assert strategy.validated_value == test_value + assert error_message in str(context.exception) + assert strategy.validate_called + assert strategy.validated_value == test_value def test_given_complex_object_when_validate_then_successful(self): strategy = MockValidationStrategy() From 7927a1417ab5c16a614bdd94e1715aaf7fa18479 Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 16:54:54 -0700 Subject: [PATCH 18/56] remove re-raise --- .../declarative/validators/dpath_validator.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index e4a5159a8..c5332276e 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -47,19 +47,13 @@ def validate(self, input_data: dict[str, Any]) -> None: if "*" in path: try: values = dpath.values(input_data, path) + for value in values: + self.strategy.validate(value) except KeyError as e: raise KeyError(f"Error validating path '{self.field_path}': {e}") - for value in values: - try: - self.strategy.validate(value) - except Exception as e: - raise ValueError(f"Error validating value '{value}': {e}") else: try: value = dpath.get(input_data, path) + self.strategy.validate(value) except KeyError as e: raise KeyError(f"Error validating path '{self.field_path}': {e}") - try: - self.strategy.validate(value) - except Exception as e: - raise ValueError(f"Error validating value '{value}': {e}") From cf1b01cfc2853168d4091919e50055f7337ea782 Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 17:09:58 -0700 Subject: [PATCH 19/56] update tests and error handling for dpath validator --- .../sources/declarative/validators/dpath_validator.py | 4 ++-- .../declarative/validators/test_dpath_validator.py | 8 +++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index c5332276e..e23f35e2d 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -50,10 +50,10 @@ def validate(self, input_data: dict[str, Any]) -> None: for value in values: self.strategy.validate(value) except KeyError as e: - raise KeyError(f"Error validating path '{self.field_path}': {e}") + raise ValueError(f"Error validating path '{self.field_path}': {e}") else: try: value = dpath.get(input_data, path) self.strategy.validate(value) except KeyError as e: - raise KeyError(f"Error validating path '{self.field_path}': {e}") + raise ValueError(f"Error validating path '{self.field_path}': {e}") diff --git a/unit_tests/sources/declarative/validators/test_dpath_validator.py b/unit_tests/sources/declarative/validators/test_dpath_validator.py index 2d4373faa..c5426e351 100644 --- a/unit_tests/sources/declarative/validators/test_dpath_validator.py +++ b/unit_tests/sources/declarative/validators/test_dpath_validator.py @@ -38,10 +38,10 @@ def test_given_invalid_path_when_validate_then_raise_key_error(self): test_data = {"user": {"profile": 
{"email": "test@example.com"}}} - with pytest.raises(KeyError) as context: + with pytest.raises(ValueError) as context: validator.validate(test_data) - assert "Error validating path" in str(context.exception) + assert "Error validating path" in str(context.value) assert not strategy.validate_called def test_given_strategy_fails_when_validate_then_raise_value_error(self): @@ -54,8 +54,6 @@ def test_given_strategy_fails_when_validate_then_raise_value_error(self): with pytest.raises(ValueError) as context: validator.validate(test_data) - assert "Error validating value" in str(context.exception) - assert error_message in str(context.exception) assert strategy.validate_called assert strategy.validated_value == "invalid-email" @@ -73,7 +71,7 @@ def test_given_empty_input_data_when_validate_then_validate_raises_exception(sel test_data = {} - with pytest.raises(KeyError): + with pytest.raises(ValueError): validator.validate(test_data) def test_path_with_wildcard_when_validate_then_validate_is_successful(self): From 4727b285f279149dc1a11a0c0f9165374f4658eb Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 8 May 2025 17:20:19 -0700 Subject: [PATCH 20/56] fix predicate validator test --- .../sources/declarative/validators/test_predicate_validator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unit_tests/sources/declarative/validators/test_predicate_validator.py b/unit_tests/sources/declarative/validators/test_predicate_validator.py index 5ddd0a2b8..1f0b3b7c0 100644 --- a/unit_tests/sources/declarative/validators/test_predicate_validator.py +++ b/unit_tests/sources/declarative/validators/test_predicate_validator.py @@ -40,7 +40,7 @@ def test_given_invalid_input_when_validate_then_raise_value_error(self): with pytest.raises(ValueError) as context: validator.validate() - assert error_message in str(context.exception) + assert error_message in str(context.value) assert strategy.validate_called assert strategy.validated_value == test_value From 5235e6ca83d24e8c4763d195fc4f19096b89fba1 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 12 May 2025 11:16:47 -0700 Subject: [PATCH 21/56] update typo --- .../sources/declarative/declarative_component_schema.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 89ecc86eb..40972ac9c 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -4202,7 +4202,7 @@ definitions: properties: type: type: string - enum: [ DpathValidation ] + enum: [ DpathValidator ] field_path: title: Field Path description: List of potentially nested fields describing the full path of the field to validate. Use "*" to validate all values from an array. 
From 4bdc3a75ab0580a54cd7414b1d8b43595a733e59 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 12 May 2025 11:18:10 -0700 Subject: [PATCH 22/56] initialize empty arrays --- airbyte_cdk/sources/declarative/spec/spec.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 220a503bc..bbf4c241e 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -34,9 +34,9 @@ class Spec: parameters: InitVar[Mapping[str, Any]] documentation_url: Optional[str] = None advanced_auth: Optional[AuthFlow] = None - config_migrations: Optional[List[ConfigTransformation]] = None - config_transformations: Optional[List[ConfigTransformation]] = None - config_validations: Optional[List[Validator]] = None + config_migrations: List[ConfigTransformation] = [] + transformations: List[ConfigTransformation] = [] + validations: List[Validator] = [] message_repository: MessageRepository = InMemoryMessageRepository() def generate_spec(self) -> ConnectorSpecification: From 45b47f4eb8f8a3234716af302597adc47571adce Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 12 May 2025 11:19:53 -0700 Subject: [PATCH 23/56] initialize empty list via field --- airbyte_cdk/sources/declarative/spec/spec.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index bbf4c241e..273bb8ec8 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field import json from typing import Any, List, Mapping, MutableMapping, Optional @@ -34,9 +34,9 @@ class Spec: parameters: InitVar[Mapping[str, Any]] documentation_url: Optional[str] = None advanced_auth: Optional[AuthFlow] = None - config_migrations: List[ConfigTransformation] = [] - transformations: List[ConfigTransformation] = [] - validations: List[Validator] = [] + config_migrations: List[ConfigTransformation] = field(default_factory=list) + transformations: List[ConfigTransformation] = field(default_factory=list) + validations: List[Validator] = field(default_factory=list) message_repository: MessageRepository = InMemoryMessageRepository() def generate_spec(self) -> ConnectorSpecification: From fa04b9d2bca8f96d64ecbd280169296c49603e73 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 12 May 2025 11:41:48 -0700 Subject: [PATCH 24/56] update create_spec to build migration/transform/validation components --- .../parsers/model_to_component_factory.py | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index e83ea1c49..0599465f3 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -3465,16 +3465,28 @@ def _get_job_timeout() -> datetime.timedelta: parameters=model.parameters or {}, ) - @staticmethod - def create_spec(model: SpecModel, config: Config, **kwargs: Any) -> Spec: + def create_spec(self, model: SpecModel, config: Config, **kwargs: Any) -> Spec: + config_migrations = [] + transformations = [] + validations = [] + + for 
migration in model.config_normalization_rules.config_migrations: + config_migrations.append(self._create_component_from_model(migration, config)) + + for transformation in model.config_normalization_rules.transformations: + transformations.append(self._create_component_from_model(transformation, config)) + + for validation in model.config_normalization_rules.validations: + validations.append(self._create_component_from_model(validation, config)) + return Spec( connection_specification=model.connection_specification, documentation_url=model.documentation_url, advanced_auth=model.advanced_auth, parameters={}, - config_migrations=model.config_normalization_rules.config_migrations, - transformations=model.config_normalization_rules.transformations, - validations=model.config_normalization_rules.validations, + config_migrations=config_migrations, + transformations=transformations, + validations=validations, ) def create_substream_partition_router( From 036c2e56e484a0d12cb2973f1240bb89dae88766 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 12 May 2025 11:53:14 -0700 Subject: [PATCH 25/56] chore: type-check, lint, format --- .../models/declarative_component_schema.py | 106 +++++++----------- .../parsers/model_to_component_factory.py | 31 +++-- airbyte_cdk/sources/declarative/spec/spec.py | 28 +++-- 3 files changed, 81 insertions(+), 84 deletions(-) diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index f3eb57f81..64edc82d5 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -618,9 +618,7 @@ class OAuthAuthenticator(BaseModel): scopes: Optional[List[str]] = Field( None, description="List of scopes that should be granted to the access token.", - examples=[ - ["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"] - ], + examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], title="Scopes", ) token_expiry_date: Optional[str] = Field( @@ -1126,28 +1124,24 @@ class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = ( - Field( - None, - description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", - examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, - { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], - } - }, - ], - title="OAuth user input", - ) + oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( + None, + description="OAuth specific blob. This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", + examples=[ + {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + { + "app_id": { + "type": "string", + "path_in_connector_config": ["info", "app_id"], + } + }, + ], + title="OAuth user input", ) - oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = ( - Field( - None, - description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + 
codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', - title="DeclarativeOAuth Connector Specification", - ) + oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = Field( + None, + description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', + title="DeclarativeOAuth Connector Specification", ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, @@ -1165,9 +1159,7 @@ class Config: complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[ - {"client_id": {"type": "string"}, "client_secret": {"type": "string"}} - ], + examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], title="OAuth input specification", ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( @@ -1949,9 +1941,7 @@ class RecordSelector(BaseModel): description="Responsible for filtering records to be emitted by the Source.", title="Record Filter", ) - schema_normalization: Optional[ - Union[SchemaNormalization, CustomSchemaNormalization] - ] = Field( + schema_normalization: Optional[Union[SchemaNormalization, CustomSchemaNormalization]] = Field( None, description="Responsible for normalization according to the schema.", title="Schema Normalization", @@ -1975,7 +1965,7 @@ class RequestBodyGraphQL(BaseModel): class DpathValidator(BaseModel): - type: Literal["DpathValidation"] + type: Literal["DpathValidator"] field_path: List[str] = Field( ..., description='List of potentially nested fields describing the full path of the field to validate. Use "*" to validate all values from an array.', @@ -2071,17 +2061,17 @@ class Config: class ConfigNormalizationRules(BaseModel): config_migrations: Optional[List[RemapField]] = Field( - None, + [], description="The config will be migrated according to these transformations and updated within the platform for subsequent syncs.", title="Config Migrations", ) transformations: Optional[List[RemapField]] = Field( - None, + [], description="The list of transformations that will be applied on the incoming config at the start of a sync.", title="Transformations", ) validations: Optional[List[Union[DpathValidator, PredicateValidator]]] = Field( - None, + [], description="The list of validations that will be performed on the incoming config before starting a sync", title="Validations", ) @@ -2258,9 +2248,7 @@ class Config: extra = Extra.allow type: Literal["DeclarativeStream"] - name: Optional[str] = Field( - "", description="The stream name.", example=["Users"], title="Name" - ) + name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") retriever: Union[SimpleRetriever, AsyncRetriever, CustomRetriever] = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages.", @@ -2438,20 +2426,18 @@ class HttpRequester(BaseModelWithDeprecations): description="Allows for retrieving a dynamic set of properties from an API endpoint which can be injected into outbound request using the stream_partition.extra_fields.", title="Fetch Properties from Endpoint", ) - request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = ( - Field( - None, - description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", - examples=[ - {"unit": "day"}, - { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' - }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": 
"updated_at"}, - ], - title="Query Parameters", - ) + request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = Field( + None, + description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + examples=[ + {"unit": "day"}, + { + "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + }, + {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, + {"sort_by[asc]": "updated_at"}, + ], + title="Query Parameters", ) request_headers: Optional[Union[Dict[str, str], str]] = Field( None, @@ -2641,9 +2627,7 @@ class QueryProperties(BaseModel): class StateDelegatingStream(BaseModel): type: Literal["StateDelegatingStream"] - name: str = Field( - ..., description="The stream name.", example=["Users"], title="Name" - ) + name: str = Field(..., description="The stream name.", example=["Users"], title="Name") full_refresh_stream: DeclarativeStream = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages when the state is empty or not provided.", @@ -2732,9 +2716,7 @@ class AsyncRetriever(BaseModel): ) download_extractor: Optional[ Union[DpathExtractor, CustomRecordExtractor, ResponseToFileExtractor] - ] = Field( - None, description="Responsible for fetching the records from provided urls." - ) + ] = Field(None, description="Responsible for fetching the records from provided urls.") creation_requester: Union[HttpRequester, CustomRequester] = Field( ..., description="Requester component that describes how to prepare HTTP requests to send to the source API to create the async server-side job.", @@ -2874,12 +2856,10 @@ class DynamicDeclarativeStream(BaseModel): stream_template: DeclarativeStream = Field( ..., description="Reference to the stream template.", title="Stream Template" ) - components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = ( - Field( - ..., - description="Component resolve and populates stream templates with components values.", - title="Components Resolver", - ) + components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = Field( + ..., + description="Component resolve and populates stream templates with components values.", + title="Components Resolver", ) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 0599465f3..25166f279 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -32,6 +32,7 @@ ) from airbyte_cdk.models import FailureType, Level from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager +from airbyte_cdk.sources.declarative import transformations from airbyte_cdk.sources.declarative.async_job.job_orchestrator import AsyncJobOrchestrator from airbyte_cdk.sources.declarative.async_job.job_tracker import JobTracker from airbyte_cdk.sources.declarative.async_job.repository import AsyncJobRepository @@ -3467,17 +3468,23 @@ def _get_job_timeout() -> datetime.timedelta: def create_spec(self, model: SpecModel, config: Config, **kwargs: Any) -> Spec: config_migrations = [] - transformations = [] - validations = [] - - for migration in model.config_normalization_rules.config_migrations: - config_migrations.append(self._create_component_from_model(migration, config)) - - for transformation in 
model.config_normalization_rules.transformations: - transformations.append(self._create_component_from_model(transformation, config)) + config_transformations = [] + config_validations = [] + + if model.config_normalization_rules: + if model.config_normalization_rules.config_migrations: + for migration in model.config_normalization_rules.config_migrations: + config_migrations.append(self._create_component_from_model(migration, config)) + + if model.config_normalization_rules.transformations: + for transformation in model.config_normalization_rules.transformations: + config_transformations.append( + self._create_component_from_model(transformation, config) + ) - for validation in model.config_normalization_rules.validations: - validations.append(self._create_component_from_model(validation, config)) + if model.config_normalization_rules.validations: + for validation in model.config_normalization_rules.validations: + config_validations.append(self._create_component_from_model(validation, config)) return Spec( connection_specification=model.connection_specification, @@ -3485,8 +3492,8 @@ def create_spec(self, model: SpecModel, config: Config, **kwargs: Any) -> Spec: advanced_auth=model.advanced_auth, parameters={}, config_migrations=config_migrations, - transformations=transformations, - validations=validations, + config_transformations=config_transformations, + config_validations=config_validations, ) def create_substream_partition_router( diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 273bb8ec8..1181a4228 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -2,10 +2,12 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dataclasses import InitVar, dataclass, field import json +from dataclasses import InitVar, dataclass, field from typing import Any, List, Mapping, MutableMapping, Optional +import orjson + from airbyte_cdk.config_observation import create_connector_config_control_message from airbyte_cdk.entrypoint import AirbyteEntrypoint from airbyte_cdk.models import ( @@ -13,8 +15,11 @@ ConnectorSpecification, ConnectorSpecificationSerializer, ) +from airbyte_cdk.models.airbyte_protocol_serializers import AirbyteMessageSerializer from airbyte_cdk.sources.declarative.models.declarative_component_schema import AuthFlow -from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ConfigTransformation +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( + ConfigTransformation, +) from airbyte_cdk.sources.declarative.validators.validator import Validator from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository from airbyte_cdk.sources.source import Source @@ -35,8 +40,8 @@ class Spec: documentation_url: Optional[str] = None advanced_auth: Optional[AuthFlow] = None config_migrations: List[ConfigTransformation] = field(default_factory=list) - transformations: List[ConfigTransformation] = field(default_factory=list) - validations: List[Validator] = field(default_factory=list) + config_transformations: List[ConfigTransformation] = field(default_factory=list) + config_validations: List[Validator] = field(default_factory=list) message_repository: MessageRepository = InMemoryMessageRepository() def generate_spec(self) -> ConnectorSpecification: @@ -58,7 +63,9 @@ def generate_spec(self) -> ConnectorSpecification: # We remap 
these keys to camel case because that's the existing format expected by the rest of the platform return ConnectorSpecificationSerializer.load(obj) - def migrate_config(self, args: List[str], source: Source, config: MutableMapping[str, Any]) -> None: + def migrate_config( + self, args: List[str], source: Source, config: MutableMapping[str, Any] + ) -> None: """ Apply all specified config transformations to the provided config and save the modified config to the given path and emit a control message. @@ -68,6 +75,9 @@ def migrate_config(self, args: List[str], source: Source, config: MutableMapping """ config_path = AirbyteEntrypoint(source).extract_config(args) + if not config_path: + return + mutable_config = dict(config) for transformation in self.config_migrations: transformation.transform(mutable_config) @@ -75,10 +85,11 @@ def migrate_config(self, args: List[str], source: Source, config: MutableMapping if mutable_config != config: with open(config_path, "w") as f: json.dump(mutable_config, f) - self.message_repository.emit_message(create_connector_config_control_message(mutable_config)) + self.message_repository.emit_message( + create_connector_config_control_message(mutable_config) + ) for message in self.message_repository.consume_queue(): - print(message.json(exclude_unset=True)) - + print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) def transform_config(self, config: MutableMapping[str, Any]) -> MutableMapping[str, Any]: """ @@ -94,7 +105,6 @@ def transform_config(self, config: MutableMapping[str, Any]) -> MutableMapping[s return mutable_config - def validate_config(self, config: MutableMapping[str, Any]) -> None: """ Apply all config validations to the provided config. From 711384c38e48fb6d9fbd28f23bdeca810ab5cdb1 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 13 May 2025 09:28:07 -0700 Subject: [PATCH 26/56] implement config transformations: AddFields and RemoveFields` --- .../config_transformations/__init__.py | 4 +- .../config_transformations/add_fields.py | 120 ++++++++++++++++++ .../config_transformations/remove_fields.py | 72 +++++++++++ .../config_transformations/test_add_fields.py | 97 ++++++++++++++ .../test_remove_fields.py | 100 +++++++++++++++ 5 files changed, 392 insertions(+), 1 deletion(-) create mode 100644 airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py create mode 100644 airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py create mode 100644 unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py create mode 100644 unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py index 4dbd08976..a6b15a67d 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py @@ -2,6 +2,8 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# +from .add_fields import AddFields from .remap_field import RemapField +from .remove_fields import RemoveFields -__all__ = ["RemapField"] +__all__ = ["RemapField", "AddFields", "RemoveFields"] diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py new file mode 100644 index 000000000..6af8628f9 --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py @@ -0,0 +1,120 @@ +# +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass, field +from typing import Any, List, MutableMapping, Optional, Type, Union + +import dpath + +from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( + ConfigTransformation, +) +from airbyte_cdk.sources.types import FieldPointer + + +@dataclass(frozen=True) +class AddedFieldDefinition: + """Defines the field to add on a config""" + + path: FieldPointer + value: Union[InterpolatedString, str] + value_type: Optional[Type[Any]] = None + + +@dataclass(frozen=True) +class ParsedAddFieldDefinition: + """Defines the field to add on a config""" + + path: FieldPointer + value: InterpolatedString + value_type: Optional[Type[Any]] = None + + +@dataclass +class AddFields(ConfigTransformation): + """ + Transformation which adds fields to a config. The path of the added field can be nested. Adding nested fields will create all + necessary parent objects (like mkdir -p). + + This transformation has access to the config being transformed. 
+ + Examples of instantiating this transformation via YAML: + - type: AddFields + fields: + # hardcoded constant + - path: ["path"] + value: "static_value" + + # nested path + - path: ["path", "to", "field"] + value: "static" + + # from config + - path: ["derived_field"] + value: "{{ config.original_field }}" + + # by supplying any valid Jinja template directive or expression + - path: ["two_times_two"] + value: "{{ 2 * 2 }}" + + Attributes: + fields (List[AddedFieldDefinition]): A list of transformations (path and corresponding value) that will be added to the config + """ + + fields: List[AddedFieldDefinition] + condition: str = "" + _parsed_fields: List[ParsedAddFieldDefinition] = field( + init=False, repr=False, default_factory=list + ) + + def __post_init__(self) -> None: + self._filter_interpolator = InterpolatedBoolean(condition=self.condition, parameters={}) + + for add_field in self.fields: + if len(add_field.path) < 1: + raise ValueError( + f"Expected a non-zero-length path for the AddFields transformation {add_field}" + ) + + if not isinstance(add_field.value, InterpolatedString): + if not isinstance(add_field.value, str): + raise ValueError( + f"Expected a string value for the AddFields transformation: {add_field}" + ) + else: + self._parsed_fields.append( + ParsedAddFieldDefinition( + add_field.path, + InterpolatedString.create(add_field.value, parameters={}), + value_type=add_field.value_type, + ) + ) + else: + self._parsed_fields.append( + ParsedAddFieldDefinition( + add_field.path, + add_field.value, + value_type=add_field.value_type, + ) + ) + + def transform( + self, + config: MutableMapping[str, Any], + ) -> None: + """ + Transforms a config by adding fields based on the provided field definitions. + + :param config: The user-provided configuration to be transformed + """ + for parsed_field in self._parsed_fields: + valid_types = (parsed_field.value_type,) if parsed_field.value_type else None + value = parsed_field.value.eval(config, valid_types=valid_types) + is_empty_condition = not self.condition + if is_empty_condition or self._filter_interpolator.eval( + config, value=value, path=parsed_field.path + ): + dpath.new(config, parsed_field.path, value) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py new file mode 100644 index 000000000..bec2806de --- /dev/null +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py @@ -0,0 +1,72 @@ +# +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. +# +import logging +from dataclasses import dataclass +from typing import Any, List, MutableMapping + +import dpath +import dpath.exceptions + +from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( + ConfigTransformation, +) +from airbyte_cdk.sources.types import FieldPointer + +logger = logging.getLogger("airbyte") + + +@dataclass +class RemoveFields(ConfigTransformation): + """ + A transformation which removes fields from a config. The fields removed are designated using FieldPointers. + During transformation, if a field or any of its parents does not exist in the config, no error is thrown. 
+ + If an input field pointer references an item in a list (e.g: ["k", 0] in the object {"k": ["a", "b", "c"]}) then + the object at that index is set to None rather than being entirely removed from the list. + + It's possible to remove objects nested in lists e.g: removing [".", 0, "k"] from {".": [{"k": "V"}]} results in {".": [{}]} + + Usage syntax: + + ```yaml + config_transformations: + - type: RemoveFields + field_pointers: + - ["path", "to", "field1"] + - ["path2"] + condition: "{{ config.some_flag }}" # Optional condition + ``` + + Attributes: + field_pointers (List[FieldPointer]): pointers to the fields that should be removed + condition (str): Optional condition that determines if the fields should be removed + """ + + field_pointers: List[FieldPointer] + condition: str = "" + + def __post_init__(self) -> None: + self._filter_interpolator = InterpolatedBoolean(condition=self.condition, parameters={}) + + def transform( + self, + config: MutableMapping[str, Any], + ) -> None: + """ + Transforms a config by removing fields based on the provided field pointers. + + :param config: The user-provided configuration to be transformed + """ + if self.condition and not self._filter_interpolator.eval(config): + return + + for pointer in self.field_pointers: + try: + dpath.delete(config, pointer) + except dpath.exceptions.PathNotFound: + pass + except Exception as e: + logger.error(f"Error removing field {pointer}: {e}") + raise e diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py new file mode 100644 index 000000000..1c8ab062e --- /dev/null +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py @@ -0,0 +1,97 @@ +# +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
+# + +import pytest + +from airbyte_cdk.sources.declarative.transformations.config_transformations.add_fields import ( + AddedFieldDefinition, + AddFields, +) + + +def test_given_valid_static_value_fields_added(): + transformation = AddFields( + fields=[ + AddedFieldDefinition(path=["new_field"], value="static_value"), + AddedFieldDefinition(path=["another_field"], value="another_value"), + ] + ) + config = {} + + transformation.transform(config) + + assert config == { + "new_field": "static_value", + "another_field": "another_value", + } + + +def test_given_valid_nested_fields_static_value_added(): + transformation = AddFields( + fields=[ + AddedFieldDefinition(path=["parent", "child", "grandchild"], value="nested_value"), + ] + ) + config = {} + + transformation.transform(config) + + assert config == {"parent": {"child": {"grandchild": "nested_value"}}} + + +def test_given_valid_interpolated_input_field_added(): + transformation = AddFields( + fields=[ + AddedFieldDefinition(path=["derived_field"], value="{{ config.original_field }}"), + AddedFieldDefinition(path=["expression_result"], value="{{ 2 * 3 }}"), + ] + ) + config = {"original_field": "original_value"} + + transformation.transform(config) + + assert config == { + "original_field": "original_value", + "derived_field": "original_value", + "expression_result": 6, + } + + +def test_given_invalid_field_raises_exception(): + with pytest.raises(ValueError): + AddFields(fields=[AddedFieldDefinition(path=[], value="value")]) + + with pytest.raises(ValueError): + AddFields(fields=[AddedFieldDefinition(path=["valid_path"], value=123)]) + + +def test_given_field_already_exists_value_is_overwritten(): + transformation = AddFields( + fields=[ + AddedFieldDefinition(path=["existing_field"], value="new_value"), + ] + ) + config = {"existing_field": "existing_value"} + + transformation.transform(config) + + assert config["existing_field"] == "new_value" + + +def test_with_condition_only_adds_fields_when_condition_is_met(): + transformation = AddFields( + fields=[ + AddedFieldDefinition(path=["conditional_field"], value="added_value"), + ], + condition="{{ config.flag == true }}", + ) + + config_true = {"flag": True} + transformation.transform(config_true) + + config_false = {"flag": False} + transformation.transform(config_false) + + assert "conditional_field" in config_true + assert "conditional_field" not in config_false diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py new file mode 100644 index 000000000..724d774d3 --- /dev/null +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py @@ -0,0 +1,100 @@ +# +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
+# + +import pytest + +from airbyte_cdk.sources.declarative.transformations.config_transformations.remove_fields import ( + RemoveFields, +) + + +def test_given_valid_field_pointer_field_is_removed(): + transformation = RemoveFields( + field_pointers=[ + ["field_to_remove"], + ["another_field_to_remove"], + ] + ) + config = { + "field_to_remove": "value_to_remove", + "another_field_to_remove": "another_value_to_remove", + "field_to_keep": "value_to_keep", + } + + transformation.transform(config) + + assert "field_to_remove" not in config + assert "another_field_to_remove" not in config + assert "field_to_keep" in config + assert config == {"field_to_keep": "value_to_keep"} + + +def test_given_valid_nested_field_pointer_field_is_removed(): + transformation = RemoveFields( + field_pointers=[["parent", "child", "field_to_remove"], ["parent", "another_child"]] + ) + config = { + "parent": { + "child": { + "field_to_remove": "nested_value_to_remove", + "field_to_keep": "nested_value_to_keep", + }, + "another_child": "another_child_value", + "child_to_keep": "child_value_to_keep", + }, + "top_level_field": "top_level_value", + } + + transformation.transform(config) + + assert "field_to_remove" not in config["parent"]["child"] + assert "another_child" not in config["parent"] + assert config["parent"]["child"]["field_to_keep"] == "nested_value_to_keep" + assert config["parent"]["child_to_keep"] == "child_value_to_keep" + assert config["top_level_field"] == "top_level_value" + assert config == { + "parent": { + "child": {"field_to_keep": "nested_value_to_keep"}, + "child_to_keep": "child_value_to_keep", + }, + "top_level_field": "top_level_value", + } + + +def test_given_valid_field_point_but_field_does_not_exist_no_field_is_removed(): + transformation = RemoveFields( + field_pointers=[ + ["nonexistent_field"], + ["parent", "nonexistent_child"], + ["parent", "child", "nonexistent_grandchild"], + ["completely", "missing", "path"], + ] + ) + + config = {"existing_field": "value", "parent": {"child": {"existing_grandchild": "value"}}} + + original_config = config.copy() + + transformation.transform(config) + + assert config == original_config + + +def test_with_condition_only_removes_fields_when_condition_is_met(): + transformation = RemoveFields( + field_pointers=[ + ["conditional_field"], + ], + condition="{{ config.flag == true }}", + ) + + config_true = {"flag": True, "conditional_field": "this should be removed"} + transformation.transform(config_true) + + config_false = {"flag": False, "conditional_field": "this should not be removed"} + transformation.transform(config_false) + + assert "conditional_field" not in config_true + assert "conditional_field" in config_false + assert config_false["conditional_field"] == "this should not be removed" From fb7d1e965cc5d895d5094abcf1835b14a26527ff Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 13 May 2025 09:40:32 -0700 Subject: [PATCH 27/56] fix module and classname conflicts --- .../transformations/config_transformations/__init__.py | 8 ++++---- .../transformations/config_transformations/add_fields.py | 4 ++-- .../transformations/config_transformations/remap_field.py | 2 +- .../config_transformations/remove_fields.py | 8 +------- .../{test_add_fields.py => test_config_add_fields.py} | 2 +- .../{test_remap_field.py => test_config_remap_field.py} | 2 +- ...test_remove_fields.py => test_config_remove_fields.py} | 4 +--- 7 files changed, 11 insertions(+), 19 deletions(-) rename 
unit_tests/sources/declarative/transformations/config_transformations/{test_add_fields.py => test_config_add_fields.py} (98%) rename unit_tests/sources/declarative/transformations/config_transformations/{test_remap_field.py => test_config_remap_field.py} (99%) rename unit_tests/sources/declarative/transformations/config_transformations/{test_remove_fields.py => test_config_remove_fields.py} (98%) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py index a6b15a67d..e344f3147 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/__init__.py @@ -2,8 +2,8 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. # -from .add_fields import AddFields -from .remap_field import RemapField -from .remove_fields import RemoveFields +from .add_fields import ConfigAddFields +from .remap_field import ConfigRemapField +from .remove_fields import ConfigRemoveFields -__all__ = ["RemapField", "AddFields", "RemoveFields"] +__all__ = ["ConfigRemapField", "ConfigAddFields", "ConfigRemoveFields"] diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py index 6af8628f9..4ead5a4f0 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py @@ -34,7 +34,7 @@ class ParsedAddFieldDefinition: @dataclass -class AddFields(ConfigTransformation): +class ConfigAddFields(ConfigTransformation): """ Transformation which adds fields to a config. The path of the added field can be nested. Adding nested fields will create all necessary parent objects (like mkdir -p). @@ -42,7 +42,7 @@ class AddFields(ConfigTransformation): This transformation has access to the config being transformed. Examples of instantiating this transformation via YAML: - - type: AddFields + - type: ConfigAddFields fields: # hardcoded constant - path: ["path"] diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 23c70a118..919e0a3c4 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -12,7 +12,7 @@ @dataclass -class RemapField(ConfigTransformation): +class ConfigRemapField(ConfigTransformation): """ Transformation that remaps a field's value to another value based on a static map. """ diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py index bec2806de..3d74c329f 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remove_fields.py @@ -1,7 +1,6 @@ # # Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# -import logging from dataclasses import dataclass from typing import Any, List, MutableMapping @@ -14,11 +13,9 @@ ) from airbyte_cdk.sources.types import FieldPointer -logger = logging.getLogger("airbyte") - @dataclass -class RemoveFields(ConfigTransformation): +class ConfigRemoveFields(ConfigTransformation): """ A transformation which removes fields from a config. The fields removed are designated using FieldPointers. During transformation, if a field or any of its parents does not exist in the config, no error is thrown. @@ -67,6 +64,3 @@ def transform( dpath.delete(config, pointer) except dpath.exceptions.PathNotFound: pass - except Exception as e: - logger.error(f"Error removing field {pointer}: {e}") - raise e diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py similarity index 98% rename from unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py rename to unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py index 1c8ab062e..9f3a67c58 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_add_fields.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py @@ -6,7 +6,7 @@ from airbyte_cdk.sources.declarative.transformations.config_transformations.add_fields import ( AddedFieldDefinition, - AddFields, + ConfigAddFields as AddFields, ) diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py similarity index 99% rename from unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py rename to unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py index af0493e92..183a7b3c5 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_remap_field.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py @@ -4,7 +4,7 @@ import pytest from airbyte_cdk.sources.declarative.transformations.config_transformations.remap_field import ( - RemapField, + ConfigRemapField as RemapField, ) diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remove_fields.py similarity index 98% rename from unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py rename to unit_tests/sources/declarative/transformations/config_transformations/test_config_remove_fields.py index 724d774d3..cb64e7671 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_remove_fields.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remove_fields.py @@ -2,10 +2,8 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# -import pytest - from airbyte_cdk.sources.declarative.transformations.config_transformations.remove_fields import ( - RemoveFields, + ConfigRemoveFields as RemoveFields, ) From c64e58801113f8354cbf32f228999c3bf8d2e484 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 13 May 2025 09:41:57 -0700 Subject: [PATCH 28/56] chore: lint --- .../config_transformations/test_config_add_fields.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py index 9f3a67c58..dd6b79248 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py @@ -6,6 +6,8 @@ from airbyte_cdk.sources.declarative.transformations.config_transformations.add_fields import ( AddedFieldDefinition, +) +from airbyte_cdk.sources.declarative.transformations.config_transformations.add_fields import ( ConfigAddFields as AddFields, ) From 01cf5a6d91c10333c50fb6162e48ede8a0cf6289 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 13 May 2025 16:25:53 -0700 Subject: [PATCH 29/56] update remap to handle interpolated keys/values --- .../config_transformations/add_fields.py | 3 +- .../config_transformations/remap_field.py | 12 +++++--- .../test_config_remap_field.py | 28 +++++++++++++++---- 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py index 4ead5a4f0..f36894ced 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py @@ -113,8 +113,7 @@ def transform( for parsed_field in self._parsed_fields: valid_types = (parsed_field.value_type,) if parsed_field.value_type else None value = parsed_field.value.eval(config, valid_types=valid_types) - is_empty_condition = not self.condition - if is_empty_condition or self._filter_interpolator.eval( + if not self.condition or self._filter_interpolator.eval( config, value=value, path=parsed_field.path ): dpath.new(config, parsed_field.path, value) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 919e0a3c4..24a7e8137 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -2,9 +2,11 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Any, List, Mapping, MutableMapping, Union +from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean +from airbyte_cdk.sources.declarative.interpolation.interpolated_mapping import InterpolatedMapping from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( ConfigTransformation, @@ -19,6 +21,7 @@ class ConfigRemapField(ConfigTransformation): map: Mapping[str, Any] field_path: List[Union[InterpolatedString, str]] + config: Mapping[str, Any] = field(default_factory=dict) def __post_init__(self) -> None: if not self.field_path: @@ -31,6 +34,7 @@ def __post_init__(self) -> None: self._field_path[path_index] = InterpolatedString.create( self.field_path[path_index], parameters={} ) + self._map = InterpolatedMapping(self.map, parameters={}).eval(config=self.config) def transform( self, @@ -51,10 +55,10 @@ def transform( return current = current[component] - if not isinstance(current, Mapping): + if not isinstance(current, MutableMapping): return field_name = path_components[-1] - if field_name in current and current[field_name] in self.map: - current[field_name] = self.map[current[field_name]] + if field_name in current and current[field_name] in self._map: + current[field_name] = self._map[current[field_name]] diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py index 183a7b3c5..adeb70b27 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py @@ -3,18 +3,13 @@ import pytest -from airbyte_cdk.sources.declarative.transformations.config_transformations.remap_field import ( +from airbyte_cdk.sources.declarative.transformations.config_transformations import ( ConfigRemapField as RemapField, ) class TestRemapField(TestCase): def test_given_valid_inputs_when_transform_then_field_is_remapped(self): - remap_transform = RemapField( - field_path=["authorization", "auth_type"], - map={"client_credentials": "oauth2", "api_key": "key_auth"}, - ) - config = { "authorization": { "auth_type": "client_credentials", @@ -22,6 +17,12 @@ def test_given_valid_inputs_when_transform_then_field_is_remapped(self): "client_secret": "secret", } } + remap_transform = RemapField( + field_path=["authorization", "auth_type"], + map={"client_credentials": "oauth2", "api_key": "key_auth"}, + config=config + ) + original_config = deepcopy(config) remap_transform.transform(config) @@ -110,3 +111,18 @@ def test_multiple_transformations_applied_in_sequence(self): assert config["auth"]["type"] == "oauth2" assert config["environment"] == "development" + + def test_amazon_seller_partner_marketplace_remap_with_interpolated_mapping(self): + + mapping = { + "endpoint": { + "ES": "{{ 'https://sellingpartnerapi' if config.environment == 'production' else 'https://sandbox.sellingpartnerapi' }}-eu.amazon.com", + } + } + sandbox_config = {"environment": "sandbox", "marketplace": "ES"} + production_config = {"environment": "production", "marketplace": "ES"} + RemapField(field_path=["marketplace"], map=mapping["endpoint"], 
config=sandbox_config).transform(sandbox_config) + RemapField(field_path=["marketplace"], map=mapping["endpoint"], config=production_config).transform(production_config) + + assert sandbox_config["marketplace"] == "https://sandbox.sellingpartnerapi-eu.amazon.com" + assert production_config["marketplace"] == "https://sellingpartnerapi-eu.amazon.com" From a2dc105f7226281f9d17bdb7bf1ad5eb4480fadf Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 13 May 2025 16:31:06 -0700 Subject: [PATCH 30/56] chore: format --- .../config_transformations/test_config_remap_field.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py index adeb70b27..4f81484b2 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_remap_field.py @@ -20,7 +20,7 @@ def test_given_valid_inputs_when_transform_then_field_is_remapped(self): remap_transform = RemapField( field_path=["authorization", "auth_type"], map={"client_credentials": "oauth2", "api_key": "key_auth"}, - config=config + config=config, ) original_config = deepcopy(config) @@ -113,7 +113,6 @@ def test_multiple_transformations_applied_in_sequence(self): assert config["environment"] == "development" def test_amazon_seller_partner_marketplace_remap_with_interpolated_mapping(self): - mapping = { "endpoint": { "ES": "{{ 'https://sellingpartnerapi' if config.environment == 'production' else 'https://sandbox.sellingpartnerapi' }}-eu.amazon.com", @@ -121,8 +120,12 @@ def test_amazon_seller_partner_marketplace_remap_with_interpolated_mapping(self) } sandbox_config = {"environment": "sandbox", "marketplace": "ES"} production_config = {"environment": "production", "marketplace": "ES"} - RemapField(field_path=["marketplace"], map=mapping["endpoint"], config=sandbox_config).transform(sandbox_config) - RemapField(field_path=["marketplace"], map=mapping["endpoint"], config=production_config).transform(production_config) + RemapField( + field_path=["marketplace"], map=mapping["endpoint"], config=sandbox_config + ).transform(sandbox_config) + RemapField( + field_path=["marketplace"], map=mapping["endpoint"], config=production_config + ).transform(production_config) assert sandbox_config["marketplace"] == "https://sandbox.sellingpartnerapi-eu.amazon.com" assert production_config["marketplace"] == "https://sellingpartnerapi-eu.amazon.com" From 4e6ed3ba834596487698849035e769d21d32f3f7 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 14 May 2025 08:42:06 -0700 Subject: [PATCH 31/56] update component schema for new transformations --- .../declarative_component_schema.yaml | 168 +++++++++++++----- .../models/declarative_component_schema.py | 166 +++++++++++------ 2 files changed, 238 insertions(+), 96 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 40972ac9c..790da3435 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -168,7 +168,7 @@ definitions: - X-Auth-Token inject_into: title: Inject API Key Into Outgoing HTTP Request - description: Configure how the API Key will be sent in requests to the source API. 
Either inject_into or header has to be defined. + description: Configure how the API Key will be sent in requests to the source API. "$ref": "#/definitions/RequestOption" examples: - inject_into: header @@ -3811,17 +3811,31 @@ definitions: config_migrations: title: Config Migrations description: The config will be migrated according to these transformations and updated within the platform for subsequent syncs. - type: array - items: - anyOf: - - "$ref": '#/definitions/ConfigTransformations/RemapField' + type: object + required: + - description + - transformations + properties: + description: + type: string + description: The description/purpose of the config migration. + transformations: + type: array + items: + anyOf: + - "$ref": "#/definitions/ConfigRemapField" + - "$ref": "#/definitions/ConfigAddFields" + - "$ref": "#/definitions/ConfigRemoveFields" + default: [] transformations: title: Transformations description: The list of transformations that will be applied on the incoming config at the start of a sync. type: array items: anyOf: - - "$ref": '#/definitions/ConfigTransformations/RemapField' + - "$ref": "#/definitions/ConfigRemapField" + - "$ref": "#/definitions/ConfigAddFields" + - "$ref": "#/definitions/ConfigRemoveFields" validations: title: Validations description: The list of validations that will be performed on the incoming config before starting a sync @@ -4202,7 +4216,7 @@ definitions: properties: type: type: string - enum: [ DpathValidator ] + enum: [DpathValidator] field_path: title: Field Path description: List of potentially nested fields describing the full path of the field to validate. Use "*" to validate all values from an array. @@ -4212,10 +4226,10 @@ definitions: interpolation_context: - config examples: - - [ "data" ] - - [ "data", "records" ] - - [ "data", "{{ parameters.name }}" ] - - [ "data", "*", "record" ] + - ["data"] + - ["data", "records"] + - ["data", "{{ parameters.name }}"] + - ["data", "*", "record"] validation_strategy: title: Validation Stragey description: The condition that the specified config value will be evaluated against @@ -4308,42 +4322,106 @@ definitions: required: - name - age - ConfigTransformations: - RemapField: - title: Remap Field - description: Transformation that remaps a field's value to another value based on a static map. - type: object - required: - - type - - map - - field_path - properties: + ConfigRemapField: + title: Remap Field + description: Transformation that remaps a field's value to another value based on a static map. + type: object + required: + - type + - map + - field_path + properties: + type: + type: string + enum: [RemapField] + map: + title: Value Mapping + description: A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value. + interpolation_context: + - config type: + - object + - string + additionalProperties: true + examples: + - pending: "in_progress" + done: "completed" + cancelled: "terminated" + - "{{ config['status_mapping'] }}" + field_path: + title: Field Path + description: The path to the field whose value should be remapped. Specified as a list of path components to navigate through nested objects. + interpolation_context: + - config + type: array + items: type: string - enum: [RemapField] - map: - title: Value Mapping - description: A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value. 
- type: - - object - - string - additionalProperties: true - examples: - - pending: "in_progress" - done: "completed" - cancelled: "terminated" - - "{{ config['status_mapping'] }}" - field_path: - title: Field Path - description: The path to the field whose value should be remapped. Specified as a list of path components to navigate through nested objects. - type: array - items: - type: string - examples: - - ["status"] - - ["data", "status"] - - ["data", "{{ parameters.name }}", "status"] - - ["data", "*", "status"] + examples: + - ["status"] + - ["data", "status"] + - ["data", "{{ config.name }}", "status"] + - ["data", "*", "status"] + ConfigAddFields: + title: Config Add Fields + description: Transformation that adds fields to a config. The path of the added field can be nested. + type: object + required: + - type + - fields + properties: + type: + type: string + enum: [ConfigAddFields] + fields: + title: Fields + description: A list of transformations (path and corresponding value) that will be added to the config. + type: array + items: + "$ref": "#/definitions/AddedFieldDefinition" + condition: + description: Fields will be added if expression is evaluated to True. + type: string + default: "" + interpolation_context: + - config + - property + examples: + - "{{ config['environemnt'] == 'sandbox' }}" + - "{{ property is integer }}" + - "{{ property|length > 5 }}" + - "{{ property == 'some_string_to_match' }}" + ConfigRemoveFields: + title: Config Remove Fields + description: Transformation that removes a field from the config. + type: object + required: + - type + - field_pointers + properties: + type: + type: string + enum: [ConfigRemoveFields] + field_pointers: + title: Field Pointers + description: A list of field pointers to be removed from the config. + type: array + items: + type: string + examples: + - ["marketplace"] + - [["content", "html"], ["content", "plain_text"]] + condition: + description: Fields will be removed if expression is evaluated to True. + type: string + default: "" + interpolation_context: + - config + - property + examples: + - "{{ config['environemnt'] == 'sandbox' }}" + - "{{ property is integer }}" + - "{{ property|length > 5 }}" + - "{{ property == 'some_string_to_match' }}" interpolation: variables: - title: config diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 64edc82d5..81595b3e7 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -618,7 +618,9 @@ class OAuthAuthenticator(BaseModel): scopes: Optional[List[str]] = Field( None, description="List of scopes that should be granted to the access token.", - examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], + examples=[ + ["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"] + ], title="Scopes", ) token_expiry_date: Optional[str] = Field( @@ -1124,24 +1126,28 @@ class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( - None, - description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", - examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, - { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], - } - }, - ], - title="OAuth user input", + oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = ( + Field( + None, + description="OAuth specific blob. This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", + examples=[ + {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + { + "app_id": { + "type": "string", + "path_in_connector_config": ["info", "app_id"], + } + }, + ], + title="OAuth user input", + ) ) - oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = Field( - None, - description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + 
codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', - title="DeclarativeOAuth Connector Specification", + oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = ( + Field( + None, + description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', + title="DeclarativeOAuth Connector Specification", + ) ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, @@ -1159,7 +1165,9 @@ class Config: complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], + examples=[ + {"client_id": {"type": "string"}, "client_secret": {"type": "string"}} + ], title="OAuth input specification", ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( @@ -1549,11 +1557,7 @@ class ValidateAdheresToSchema(BaseModel): ) -class ConfigTransformations(BaseModel): - __root__: Any - - -class RemapField(BaseModel): +class ConfigRemapField(BaseModel): type: Literal["RemapField"] map: Union[Dict[str, Any], str] = Field( ..., @@ -1570,13 +1574,33 @@ class RemapField(BaseModel): examples=[ ["status"], ["data", "status"], - ["data", "{{ parameters.name }}", "status"], + ["data", "{{ config.name }}", "status"], ["data", "*", "status"], ], title="Field Path", ) +class ConfigRemoveFields(BaseModel): + type: Literal["ConfigRemoveFields"] + field_pointers: List[str] = Field( + ..., + description="A list of field pointers to be removed from the config.", + examples=[["marketplace"], [["content", "html"], ["content", "plain_text"]]], + title="Field Pointers", + ) + condition: Optional[str] = Field( + "", + description="Fields will be removed if expression is evaluated to True.", + examples=[ + "{{ config['environemnt'] == 'sandbox' }}", + "{{ property is integer }}", + "{{ property|length > 5 }}", + "{{ property == 'some_string_to_match' }}", + ], + ) + + class AddedFieldDefinition(BaseModel): type: Literal["AddedFieldDefinition"] path: List[str] = Field( @@ -1639,7 +1663,7 @@ class ApiKeyAuthenticator(BaseModel): ) inject_into: Optional[RequestOption] = Field( None, - description="Configure how the API Key will be sent in requests to the source API. 
Either inject_into or header has to be defined.", + description="Configure how the API Key will be sent in requests to the source API.", examples=[ {"inject_into": "header", "field_name": "Authorization"}, {"inject_into": "request_parameter", "field_name": "authKey"}, @@ -1941,7 +1965,9 @@ class RecordSelector(BaseModel): description="Responsible for filtering records to be emitted by the Source.", title="Record Filter", ) - schema_normalization: Optional[Union[SchemaNormalization, CustomSchemaNormalization]] = Field( + schema_normalization: Optional[ + Union[SchemaNormalization, CustomSchemaNormalization] + ] = Field( None, description="Responsible for normalization according to the schema.", title="Schema Normalization", @@ -2004,6 +2030,25 @@ class PredicateValidator(BaseModel): ) +class ConfigAddFields(BaseModel): + type: Literal["ConfigAddFields"] + fields: List[AddedFieldDefinition] = Field( + ..., + description="A list of transformations (path and corresponding value) that will be added to the config.", + title="Fields", + ) + condition: Optional[str] = Field( + "", + description="Fields will be added if expression is evaluated to True.", + examples=[ + "{{ config['environemnt'] == 'sandbox' }}", + "{{ property is integer }}", + "{{ property|length > 5 }}", + "{{ property == 'some_string_to_match' }}", + ], + ) + + class CompositeErrorHandler(BaseModel): type: Literal["CompositeErrorHandler"] error_handlers: List[Union[CompositeErrorHandler, DefaultErrorHandler]] = Field( @@ -2059,19 +2104,28 @@ class Config: ) +class ConfigMigrations(BaseModel): + description: str = Field( + ..., description="The description/purpose of the config migration." + ) + transformations: List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] + + class ConfigNormalizationRules(BaseModel): - config_migrations: Optional[List[RemapField]] = Field( - [], + config_migrations: Optional[ConfigMigrations] = Field( + None, description="The config will be migrated according to these transformations and updated within the platform for subsequent syncs.", title="Config Migrations", ) - transformations: Optional[List[RemapField]] = Field( - [], + transformations: Optional[ + List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] + ] = Field( + None, description="The list of transformations that will be applied on the incoming config at the start of a sync.", title="Transformations", ) validations: Optional[List[Union[DpathValidator, PredicateValidator]]] = Field( - [], + None, description="The list of validations that will be performed on the incoming config before starting a sync", title="Validations", ) @@ -2248,7 +2302,9 @@ class Config: extra = Extra.allow type: Literal["DeclarativeStream"] - name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") + name: Optional[str] = Field( + "", description="The stream name.", example=["Users"], title="Name" + ) retriever: Union[SimpleRetriever, AsyncRetriever, CustomRetriever] = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages.", @@ -2426,18 +2482,20 @@ class HttpRequester(BaseModelWithDeprecations): description="Allows for retrieving a dynamic set of properties from an API endpoint which can be injected into outbound request using the stream_partition.extra_fields.", title="Fetch Properties from Endpoint", ) - request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = Field( - None, - description="Specifies the query 
parameters that should be set on an outgoing HTTP request given the inputs.", - examples=[ - {"unit": "day"}, - { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' - }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": "updated_at"}, - ], - title="Query Parameters", + request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = ( + Field( + None, + description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + examples=[ + {"unit": "day"}, + { + "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + }, + {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, + {"sort_by[asc]": "updated_at"}, + ], + title="Query Parameters", + ) ) request_headers: Optional[Union[Dict[str, str], str]] = Field( None, @@ -2627,7 +2685,9 @@ class QueryProperties(BaseModel): class StateDelegatingStream(BaseModel): type: Literal["StateDelegatingStream"] - name: str = Field(..., description="The stream name.", example=["Users"], title="Name") + name: str = Field( + ..., description="The stream name.", example=["Users"], title="Name" + ) full_refresh_stream: DeclarativeStream = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages when the state is empty or not provided.", @@ -2716,7 +2776,9 @@ class AsyncRetriever(BaseModel): ) download_extractor: Optional[ Union[DpathExtractor, CustomRecordExtractor, ResponseToFileExtractor] - ] = Field(None, description="Responsible for fetching the records from provided urls.") + ] = Field( + None, description="Responsible for fetching the records from provided urls." 
+ ) creation_requester: Union[HttpRequester, CustomRequester] = Field( ..., description="Requester component that describes how to prepare HTTP requests to send to the source API to create the async server-side job.", @@ -2856,10 +2918,12 @@ class DynamicDeclarativeStream(BaseModel): stream_template: DeclarativeStream = Field( ..., description="Reference to the stream template.", title="Stream Template" ) - components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = Field( - ..., - description="Component resolve and populates stream templates with components values.", - title="Components Resolver", + components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = ( + Field( + ..., + description="Component resolve and populates stream templates with components values.", + title="Components Resolver", + ) ) From 833d9e7a8dbab889c5553c28675ef1923f6d42b7 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 14 May 2025 08:45:41 -0700 Subject: [PATCH 32/56] update transformations per comments --- .../config_transformations/config_transformation.py | 4 ++-- .../transformations/config_transformations/remap_field.py | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py index a30e9cb02..a7db4d7a4 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/config_transformation.py @@ -3,7 +3,7 @@ # from abc import ABC, abstractmethod -from typing import Any, Dict +from typing import Any, MutableMapping class ConfigTransformation(ABC): @@ -14,7 +14,7 @@ class ConfigTransformation(ABC): @abstractmethod def transform( self, - config: Dict[str, Any], + config: MutableMapping[str, Any], ) -> None: """ Transform a configuration by adding, deleting, or mutating fields directly from the config reference passed in argument. 
diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 24a7e8137..3f7120d0a 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -34,7 +34,7 @@ def __post_init__(self) -> None: self._field_path[path_index] = InterpolatedString.create( self.field_path[path_index], parameters={} ) - self._map = InterpolatedMapping(self.map, parameters={}).eval(config=self.config) + self._map = InterpolatedMapping(self.map, parameters={}) def transform( self, @@ -60,5 +60,7 @@ def transform( field_name = path_components[-1] - if field_name in current and current[field_name] in self._map: - current[field_name] = self._map[current[field_name]] + mapping = self._map.eval(config=self.config) + + if field_name in current and current[field_name] in mapping: + current[field_name] = mapping[current[field_name]] From ff10aa3ee81cc5e016504254ccbca5af08d69b31 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 14 May 2025 17:06:58 -0700 Subject: [PATCH 33/56] add ConfigMigration class and new spec tests --- .../declarative_component_schema.yaml | 58 +++-- .../models/declarative_component_schema.py | 33 +-- .../parsers/model_to_component_factory.py | 100 +++++++- .../sources/declarative/spec/__init__.py | 4 +- airbyte_cdk/sources/declarative/spec/spec.py | 13 +- .../config_transformations/add_fields.py | 33 +-- .../config_transformations/remap_field.py | 2 +- .../declarative/validators/dpath_validator.py | 2 +- .../sources/declarative/spec/test_spec.py | 216 +++++++++++++++++- .../test_config_add_fields.py | 18 +- 10 files changed, 399 insertions(+), 80 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 790da3435..2ff6cf765 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -3810,26 +3810,13 @@ definitions: properties: config_migrations: title: Config Migrations - description: The config will be migrated according to these transformations and updated within the platform for subsequent syncs. - type: object - required: - - description - - transformations - properties: - description: - type: string - description: The description/purpose of the config migration. - transformations: - type: array - items: - anyOf: - - "$ref": "#/definitions/ConfigRemapField" - - "$ref": "#/definitions/ConfigAddFields" - - "$ref": "#/definitions/ConfigRemoveFields" - default: [] + description: The discrete migrations that will be applied on the incoming config. Each migration will be applied in the order they are defined. + type: array + items: + "$ref": "#/definitions/ConfigMigration" transformations: title: Transformations - description: The list of transformations that will be applied on the incoming config at the start of a sync. + description: The list of transformations that will be applied on the incoming config at the start of each sync. The transformations will be applied in the order they are defined. 
type: array items: anyOf: @@ -3838,12 +3825,32 @@ definitions: - "$ref": "#/definitions/ConfigRemoveFields" validations: title: Validations - description: The list of validations that will be performed on the incoming config before starting a sync + description: The list of validations that will be performed on the incoming config at the start of each sync. type: array items: anyOf: - "$ref": "#/definitions/DpathValidator" - "$ref": "#/definitions/PredicateValidator" + ConfigMigration: + title: Config Migration + description: A config migration that will be applied on the incoming config at the start of a sync. + type: object + required: + - transformations + properties: + description: + type: string + description: The description/purpose of the config migration. + transformations: + title: Transformations + description: The list of transformations that will attempt to be applied on an incoming unmigrated config. The transformations will be applied in the order they are defined. + type: array + items: + anyOf: + - "$ref": "#/definitions/ConfigRemapField" + - "$ref": "#/definitions/ConfigAddFields" + - "$ref": "#/definitions/ConfigRemoveFields" + default: [] SubstreamPartitionRouter: title: Substream Partition Router description: Partition router that is used to retrieve records that have been partitioned according to records from the specified parent streams. An example of a parent stream is automobile brands and the substream would be the various car models associated with each branch. @@ -4275,14 +4282,14 @@ definitions: type: object required: - type - - schema + - base_schema properties: type: type: string enum: [ValidateAdheresToSchema] - schema: - title: JSON Schema - description: The JSON schema used for validation. + base_schema: + title: Base JSON Schema + description: The base JSON schema against which the user-provided schema will be validated. type: - string - object @@ -4406,9 +4413,10 @@ definitions: description: A list of field pointers to be removed from the config. type: array items: - type: string + items: + type: string examples: - - ["marketplace"] + - ["tags"] - [["content", "html"], ["content", "plain_text"]] condition: description: Fields will be removed if expression is evaluated to True. 
diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 81595b3e7..d716f6d86 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1531,10 +1531,9 @@ class Config: class ValidateAdheresToSchema(BaseModel): type: Literal["ValidateAdheresToSchema"] - schema_: Union[str, Dict[str, Any]] = Field( + base_schema: Union[str, Dict[str, Any]] = Field( ..., - alias="schema", - description="The JSON schema used for validation.", + description="The base JSON schema against which the user-provided schema will be validated.", examples=[ "{{ config['report_validation_schema'] }}", '\'{\n "$schema": "http://json-schema.org/draft-07/schema#",\n "title": "Person",\n "type": "object",\n "properties": {\n "name": {\n "type": "string",\n "description": "The person\'s name"\n },\n "age": {\n "type": "integer",\n "minimum": 0,\n "description": "The person\'s age"\n }\n },\n "required": ["name", "age"]\n}\'\n', @@ -1553,7 +1552,7 @@ class ValidateAdheresToSchema(BaseModel): "required": ["name", "age"], }, ], - title="JSON Schema", + title="Base JSON Schema", ) @@ -1583,10 +1582,10 @@ class ConfigRemapField(BaseModel): class ConfigRemoveFields(BaseModel): type: Literal["ConfigRemoveFields"] - field_pointers: List[str] = Field( + field_pointers: List[List[str]] = Field( ..., description="A list of field pointers to be removed from the config.", - examples=[["marketplace"], [["content", "html"], ["content", "plain_text"]]], + examples=[["tags"], [["content", "html"], ["content", "plain_text"]]], title="Field Pointers", ) condition: Optional[str] = Field( @@ -2104,29 +2103,35 @@ class Config: ) -class ConfigMigrations(BaseModel): - description: str = Field( - ..., description="The description/purpose of the config migration." +class ConfigMigration(BaseModel): + description: Optional[str] = Field( + None, description="The description/purpose of the config migration." + ) + transformations: List[ + Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields] + ] = Field( + ..., + description="The list of transformations that will attempt to be applied on an incoming unmigrated config. The transformations will be applied in the order they are defined.", + title="Transformations", ) - transformations: List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] class ConfigNormalizationRules(BaseModel): - config_migrations: Optional[ConfigMigrations] = Field( + config_migrations: Optional[List[ConfigMigration]] = Field( None, - description="The config will be migrated according to these transformations and updated within the platform for subsequent syncs.", + description="The discrete migrations that will be applied on the incoming config. Each migration will be applied in the order they are defined.", title="Config Migrations", ) transformations: Optional[ List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] ] = Field( None, - description="The list of transformations that will be applied on the incoming config at the start of a sync.", + description="The list of transformations that will be applied on the incoming config at the start of each sync. 
The transformations will be applied in the order they are defined.", title="Transformations", ) validations: Optional[List[Union[DpathValidator, PredicateValidator]]] = Field( None, - description="The list of validations that will be performed on the incoming config before starting a sync", + description="The list of validations that will be performed on the incoming config at the start of each sync.", title="Validations", ) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 25166f279..b155d80fd 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -19,6 +19,7 @@ Optional, Type, Union, + cast, get_args, get_origin, get_type_hints, @@ -154,9 +155,21 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( ConcurrencyLevel as ConcurrencyLevelModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + ConfigAddFields as ConfigAddFieldsModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( ConfigComponentsResolver as ConfigComponentsResolverModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + ConfigMigration as ConfigMigrationModel, +) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + ConfigRemapField as ConfigRemapFieldModel, +) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + ConfigRemoveFields as ConfigRemoveFieldsModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( ConstantBackoffStrategy as ConstantBackoffStrategyModel, ) @@ -226,6 +239,9 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( DpathFlattenFields as DpathFlattenFieldsModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + DpathValidator as DpathValidatorModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( DynamicSchemaLoader as DynamicSchemaLoaderModel, ) @@ -337,6 +353,9 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( ParentStreamConfig as ParentStreamConfigModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + PredicateValidator as PredicateValidatorModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( PropertiesFromEndpoint as PropertiesFromEndpointModel, ) @@ -401,6 +420,9 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( UnlimitedCallRatePolicy as UnlimitedCallRatePolicyModel, ) +from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( + ValidateAdheresToSchema as ValidateAdheresToSchemaModel, +) from airbyte_cdk.sources.declarative.models.declarative_component_schema import ValueType from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( WaitTimeFromHeader as WaitTimeFromHeaderModel, @@ -506,7 +528,7 @@ TypesMap, ) from airbyte_cdk.sources.declarative.schema.composite_schema_loader import CompositeSchemaLoader -from airbyte_cdk.sources.declarative.spec import Spec +from airbyte_cdk.sources.declarative.spec import ConfigMigration, Spec from airbyte_cdk.sources.declarative.stream_slicers import StreamSlicer from airbyte_cdk.sources.declarative.transformations 
import ( AddFields, @@ -514,6 +536,11 @@ RemoveFields, ) from airbyte_cdk.sources.declarative.transformations.add_fields import AddedFieldDefinition +from airbyte_cdk.sources.declarative.transformations.config_transformations import ( + ConfigAddFields, + ConfigRemapField, + ConfigRemoveFields, +) from airbyte_cdk.sources.declarative.transformations.dpath_flatten_fields import ( DpathFlattenFields, KeyTransformation, @@ -530,6 +557,11 @@ from airbyte_cdk.sources.declarative.transformations.keys_to_snake_transformation import ( KeysToSnakeCaseTransformation, ) +from airbyte_cdk.sources.declarative.validators import ( + DpathValidator, + PredicateValidator, + ValidateAdheresToSchema, +) from airbyte_cdk.sources.message import ( InMemoryMessageRepository, LogAppenderMessageRepositoryDecorator, @@ -617,6 +649,10 @@ def _init_mappings(self) -> None: CheckDynamicStreamModel: self.create_check_dynamic_stream, CompositeErrorHandlerModel: self.create_composite_error_handler, ConcurrencyLevelModel: self.create_concurrency_level, + ConfigMigrationModel: self.create_config_migration, + ConfigAddFieldsModel: self.create_config_add_fields, + ConfigRemapFieldModel: self.create_config_remap_field, + ConfigRemoveFieldsModel: self.create_config_remove_fields, ConstantBackoffStrategyModel: self.create_constant_backoff_strategy, CsvDecoderModel: self.create_csv_decoder, CursorPaginationModel: self.create_cursor_pagination, @@ -640,6 +676,7 @@ def _init_mappings(self) -> None: DefaultErrorHandlerModel: self.create_default_error_handler, DefaultPaginatorModel: self.create_default_paginator, DpathExtractorModel: self.create_dpath_extractor, + DpathValidatorModel: self.create_dpath_validator, ResponseToFileExtractorModel: self.create_response_to_file_extractor, ExponentialBackoffStrategyModel: self.create_exponential_backoff_strategy, SessionTokenAuthenticatorModel: self.create_session_token_authenticator, @@ -673,6 +710,7 @@ def _init_mappings(self) -> None: OffsetIncrementModel: self.create_offset_increment, PageIncrementModel: self.create_page_increment, ParentStreamConfigModel: self.create_parent_stream_config, + PredicateValidatorModel: self.create_predicate_validator, PropertiesFromEndpointModel: self.create_properties_from_endpoint, PropertyChunkingModel: self.create_property_chunking, QueryPropertiesModel: self.create_query_properties, @@ -687,6 +725,7 @@ def _init_mappings(self) -> None: StateDelegatingStreamModel: self.create_state_delegating_stream, SpecModel: self.create_spec, SubstreamPartitionRouterModel: self.create_substream_partition_router, + ValidateAdheresToSchemaModel: self.create_validate_adheres_to_schema, WaitTimeFromHeaderModel: self.create_wait_time_from_header, WaitUntilTimeFromHeaderModel: self.create_wait_until_time_from_header, AsyncRetrieverModel: self.create_async_retriever, @@ -779,6 +818,65 @@ def _collect_model_deprecations(self, model: BaseModelWithDeprecations) -> None: if log not in self._collected_deprecation_logs: self._collected_deprecation_logs.append(log) + + def create_config_migration(self, model: ConfigMigrationModel, config: Config) -> ConfigMigration: + transformations = [] + for transformation in model.transformations: + transformations.append(self._create_component_from_model(transformation, config)) + + return ConfigMigration( + description=model.description, + transformations=transformations, + ) + + def create_config_add_fields(self, model: ConfigAddFieldsModel, config: Config) -> ConfigAddFields: + fields = [self._create_component_from_model(field, config) for 
field in model.fields] + return ConfigAddFields( + fields=fields, + condition=model.condition or "", + ) + + @staticmethod + def create_config_remove_fields(model: ConfigRemoveFieldsModel) -> ConfigRemoveFields: + return ConfigRemoveFields( + field_pointers=model.field_pointers, + condition=model.condition or "", + ) + + @staticmethod + def create_config_remap_field(model: ConfigRemapFieldModel, config: Config) -> ConfigRemapField: + mapping = cast(Mapping[str, Any], model.map) + return ConfigRemapField( + map=mapping, + field_path=model.field_path, + config=config, + ) + + + def create_dpath_validator(self, model: DpathValidatorModel, config: Config) -> DpathValidator: + + strategy = self._create_component_from_model(model.validation_strategy, config) + + return DpathValidator( + field_path=model.field_path, + strategy=strategy, + ) + + def create_predicate_validator(self, model: PredicateValidatorModel, config: Config) -> PredicateValidator: + strategy = self._create_component_from_model(model.validation_strategy, config) + + return PredicateValidator( + value=model.value, + strategy=strategy, + ) + + @staticmethod + def create_validate_adheres_to_schema(model: ValidateAdheresToSchemaModel) -> ValidateAdheresToSchema: + base_schema = cast(Mapping[str, Any], model.base_schema) + return ValidateAdheresToSchema( + schema=base_schema, + ) + @staticmethod def create_added_field_definition( model: AddedFieldDefinitionModel, config: Config, **kwargs: Any diff --git a/airbyte_cdk/sources/declarative/spec/__init__.py b/airbyte_cdk/sources/declarative/spec/__init__.py index 1c13ed67c..63df0531b 100644 --- a/airbyte_cdk/sources/declarative/spec/__init__.py +++ b/airbyte_cdk/sources/declarative/spec/__init__.py @@ -2,6 +2,6 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from airbyte_cdk.sources.declarative.spec.spec import Spec +from airbyte_cdk.sources.declarative.spec.spec import ConfigMigration, Spec -__all__ = ["Spec"] +__all__ = ["Spec", "ConfigMigration"] diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 1181a4228..8d00560e4 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -25,6 +25,12 @@ from airbyte_cdk.sources.source import Source +@dataclass +class ConfigMigration: + transformations: List[ConfigTransformation] + description: Optional[str] = None + + @dataclass class Spec: """ @@ -39,7 +45,7 @@ class Spec: parameters: InitVar[Mapping[str, Any]] documentation_url: Optional[str] = None advanced_auth: Optional[AuthFlow] = None - config_migrations: List[ConfigTransformation] = field(default_factory=list) + config_migrations: List[ConfigMigration] = field(default_factory=list) config_transformations: List[ConfigTransformation] = field(default_factory=list) config_validations: List[Validator] = field(default_factory=list) message_repository: MessageRepository = InMemoryMessageRepository() @@ -79,8 +85,9 @@ def migrate_config( return mutable_config = dict(config) - for transformation in self.config_migrations: - transformation.transform(mutable_config) + for migration in self.config_migrations: + for transformation in migration.transformations: + transformation.transform(mutable_config) if mutable_config != config: with open(config_path, "w") as f: diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py index f36894ced..c58785f3e 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/add_fields.py @@ -9,28 +9,13 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.transformations.add_fields import ( + AddedFieldDefinition, + ParsedAddFieldDefinition, +) from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( ConfigTransformation, ) -from airbyte_cdk.sources.types import FieldPointer - - -@dataclass(frozen=True) -class AddedFieldDefinition: - """Defines the field to add on a config""" - - path: FieldPointer - value: Union[InterpolatedString, str] - value_type: Optional[Type[Any]] = None - - -@dataclass(frozen=True) -class ParsedAddFieldDefinition: - """Defines the field to add on a config""" - - path: FieldPointer - value: InterpolatedString - value_type: Optional[Type[Any]] = None @dataclass @@ -44,19 +29,19 @@ class ConfigAddFields(ConfigTransformation): Examples of instantiating this transformation via YAML: - type: ConfigAddFields fields: - # hardcoded constant + ### hardcoded constant - path: ["path"] value: "static_value" - # nested path + ### nested path - path: ["path", "to", "field"] value: "static" - # from config + ### from config - path: ["derived_field"] value: "{{ config.original_field }}" - # by supplying any valid Jinja template directive or expression + ### by supplying any valid Jinja template directive or expression - path: ["two_times_two"] value: "{{ 2 * 2 }}" @@ -90,6 +75,7 @@ def __post_init__(self) -> None: add_field.path, 
InterpolatedString.create(add_field.value, parameters={}), value_type=add_field.value_type, + parameters={}, ) ) else: @@ -98,6 +84,7 @@ def __post_init__(self) -> None: add_field.path, add_field.value, value_type=add_field.value_type, + parameters={}, ) ) diff --git a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py index 3f7120d0a..f56da0fff 100644 --- a/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py +++ b/airbyte_cdk/sources/declarative/transformations/config_transformations/remap_field.py @@ -20,7 +20,7 @@ class ConfigRemapField(ConfigTransformation): """ map: Mapping[str, Any] - field_path: List[Union[InterpolatedString, str]] + field_path: List[str] config: Mapping[str, Any] = field(default_factory=dict) def __post_init__(self) -> None: diff --git a/airbyte_cdk/sources/declarative/validators/dpath_validator.py b/airbyte_cdk/sources/declarative/validators/dpath_validator.py index e23f35e2d..05cb12316 100644 --- a/airbyte_cdk/sources/declarative/validators/dpath_validator.py +++ b/airbyte_cdk/sources/declarative/validators/dpath_validator.py @@ -19,7 +19,7 @@ class DpathValidator(Validator): and applies a validation strategy to it. """ - field_path: List[Union[InterpolatedString, str]] + field_path: List[str] strategy: ValidationStrategy def __post_init__(self) -> None: diff --git a/unit_tests/sources/declarative/spec/test_spec.py b/unit_tests/sources/declarative/spec/test_spec.py index bdd7ce857..0824d41cd 100644 --- a/unit_tests/sources/declarative/spec/test_spec.py +++ b/unit_tests/sources/declarative/spec/test_spec.py @@ -1,7 +1,9 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# +from unittest.mock import Mock, mock_open + import pytest from airbyte_cdk.models import ( @@ -37,7 +39,22 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import ( State as component_declarative_oauth_state, ) +from airbyte_cdk.sources.declarative.spec.spec import ConfigMigration from airbyte_cdk.sources.declarative.spec.spec import Spec as component_spec +from airbyte_cdk.sources.declarative.transformations.add_fields import AddedFieldDefinition +from airbyte_cdk.sources.declarative.transformations.config_transformations.add_fields import ( + ConfigAddFields, +) +from airbyte_cdk.sources.declarative.transformations.config_transformations.remap_field import ( + ConfigRemapField, +) +from airbyte_cdk.sources.declarative.transformations.config_transformations.remove_fields import ( + ConfigRemoveFields, +) +from airbyte_cdk.sources.declarative.validators.dpath_validator import DpathValidator +from airbyte_cdk.sources.declarative.validators.validate_adheres_to_schema import ( + ValidateAdheresToSchema, +) @pytest.mark.parametrize( @@ -142,3 +159,200 @@ ) def test_spec(spec, expected_connection_specification) -> None: assert spec.generate_spec() == expected_connection_specification + +@pytest.fixture +def migration_mocks(monkeypatch): + mock_message_repository = Mock() + mock_message_repository.consume_queue.return_value = [Mock()] + + mock_source = Mock() + mock_entrypoint = Mock() + mock_entrypoint.extract_config.return_value = "/fake/config/path" + monkeypatch.setattr("airbyte_cdk.sources.declarative.spec.spec.AirbyteEntrypoint", lambda _: mock_entrypoint) + + _mock_open = mock_open() + mock_json_dump = Mock() + mock_print = Mock() + mock_serializer_dump = Mock() + + mock_decoded_bytes = Mock() + mock_decoded_bytes.decode.return_value = "decoded_message" + mock_orjson_dumps = Mock(return_value=mock_decoded_bytes) + + monkeypatch.setattr("builtins.open", _mock_open) + monkeypatch.setattr("json.dump", mock_json_dump) + monkeypatch.setattr("builtins.print", mock_print) + monkeypatch.setattr("airbyte_cdk.models.airbyte_protocol_serializers.AirbyteMessageSerializer.dump", mock_serializer_dump) + monkeypatch.setattr("airbyte_cdk.sources.declarative.spec.spec.orjson.dumps", mock_orjson_dumps) + + return { + "message_repository": mock_message_repository, + "source": mock_source, + "open": _mock_open, + "json_dump": mock_json_dump, + "print": mock_print, + "serializer_dump": mock_serializer_dump, + "orjson_dumps": mock_orjson_dumps, + "decoded_bytes": mock_decoded_bytes + } + +def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migration_mocks) -> None: + input_config = {"planet": "CRSC"} + + spec = component_spec( + connection_specification={}, + parameters={}, + config_migrations=[ + ConfigMigration( + description="Test migration", + transformations=[ + ConfigRemapField( + map={"CRSC": "Coruscant"}, + field_path=["planet"], + config=input_config + ) + ] + ) + ] + ) + spec.message_repository = migration_mocks["message_repository"] + + spec.migrate_config(["spec"], migration_mocks["source"], input_config) + + migration_mocks["message_repository"].emit_message.assert_called_once() + migration_mocks["open"].assert_called_once_with("/fake/config/path", "w") + migration_mocks["json_dump"].assert_called_once() + migration_mocks["print"].assert_called() + migration_mocks["serializer_dump"].assert_called() + migration_mocks["orjson_dumps"].assert_called() + migration_mocks["decoded_bytes"].decode.assert_called() + + +def 
test_given_already_migrated_config_no_control_message_is_emitted(migration_mocks) -> None: + input_config = {"planet": "Coruscant"} + + spec = component_spec( + connection_specification={}, + parameters={}, + config_migrations=[ + ConfigMigration( + description="Test migration", + transformations=[ + ConfigRemapField( + map={"CRSC": "Coruscant"}, + field_path=["planet"], + config=input_config + ) + ] + ) + ] + ) + spec.message_repository = migration_mocks["message_repository"] + + spec.migrate_config(["spec"], migration_mocks["source"], input_config) + + migration_mocks["message_repository"].emit_message.assert_not_called() + migration_mocks["open"].assert_not_called() + migration_mocks["json_dump"].assert_not_called() + migration_mocks["print"].assert_not_called() + migration_mocks["serializer_dump"].assert_not_called() + migration_mocks["orjson_dumps"].assert_not_called() + migration_mocks["decoded_bytes"].decode.assert_not_called() + +def test_given_list_of_transformations_when_transform_config_then_config_is_transformed() -> None: + input_config = {"planet_code": "CRSC"} + spec = component_spec( + connection_specification={}, + parameters={}, + config_transformations=[ + ConfigAddFields( + fields=[ + AddedFieldDefinition( + path=["planet_name"], + value="{{ config['planet_code'] }}", + value_type=None, + parameters={} + ), + AddedFieldDefinition( + path=["planet_population"], + value="{{ config['planet_code'] }}", + value_type=None, + parameters={} + ), + ] + ), + ConfigRemapField( + map={ + "CRSC": "Coruscant", + }, + field_path=["planet_name"], + config=input_config + ), + ConfigRemapField( + map={ + "CRSC": 3_000_000_000_000, + }, + field_path=["planet_population"], + config=input_config + ), + ConfigRemoveFields( + field_pointers=["planet_code"], + ) + ] + ) + assert spec.transform_config(input_config) == {"planet_name": "Coruscant", "planet_population": 3_000_000_000_000} + + +def test_given_valid_config_value_when_validating_then_no_exception_is_raised() -> None: + spec = component_spec( + connection_specification={}, + parameters={}, + config_validations=[ + DpathValidator( + field_path=["test_field"], + strategy=ValidateAdheresToSchema(schema={ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Test Spec", + "type": "object", + "required": [], + "additionalProperties": False, + "properties": { + "field_to_validate": { + "type": "string", + "title": "Name", + "description": "The name of the test spec", + "airbyte_secret": False, + } + } + }) + ) + ] + ) + input_config = {"test_field": {"field_to_validate": "test"}} + spec.validate_config(input_config) + +def test_given_invalid_config_value_when_validating_then_exception_is_raised() -> None: + spec = component_spec( + connection_specification={}, + parameters={}, + config_validations=[ + DpathValidator(field_path=["test_field"], strategy=ValidateAdheresToSchema(schema={ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Test Spec", + "type": "object", + "required": [], + "properties": { + "field_to_validate": { + "type": "string", + "title": "Name", + "description": "The name of the test spec", + "airbyte_secret": False, + } + } + })) + ] + ) + input_config = {"test_field": {"field_to_validate": 123}} + + with pytest.raises(Exception): + spec.validate_config(input_config) diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py index 
dd6b79248..1916adf2a 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py @@ -15,8 +15,8 @@ def test_given_valid_static_value_fields_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["new_field"], value="static_value"), - AddedFieldDefinition(path=["another_field"], value="another_value"), + AddedFieldDefinition(path=["new_field"], value="static_value", value_type=None, parameters={}), + AddedFieldDefinition(path=["another_field"], value="another_value", value_type=None, parameters={}), ] ) config = {} @@ -32,7 +32,7 @@ def test_given_valid_static_value_fields_added(): def test_given_valid_nested_fields_static_value_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["parent", "child", "grandchild"], value="nested_value"), + AddedFieldDefinition(path=["parent", "child", "grandchild"], value="nested_value", value_type=None, parameters={}), ] ) config = {} @@ -45,8 +45,8 @@ def test_given_valid_nested_fields_static_value_added(): def test_given_valid_interpolated_input_field_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["derived_field"], value="{{ config.original_field }}"), - AddedFieldDefinition(path=["expression_result"], value="{{ 2 * 3 }}"), + AddedFieldDefinition(path=["derived_field"], value="{{ config.original_field }}", value_type=None, parameters={}), + AddedFieldDefinition(path=["expression_result"], value="{{ 2 * 3 }}", value_type=None, parameters={}), ] ) config = {"original_field": "original_value"} @@ -62,16 +62,16 @@ def test_given_valid_interpolated_input_field_added(): def test_given_invalid_field_raises_exception(): with pytest.raises(ValueError): - AddFields(fields=[AddedFieldDefinition(path=[], value="value")]) + AddFields(fields=[AddedFieldDefinition(path=[], value="value", value_type=None, parameters={})]) with pytest.raises(ValueError): - AddFields(fields=[AddedFieldDefinition(path=["valid_path"], value=123)]) + AddFields(fields=[AddedFieldDefinition(path=["valid_path"], value=123, value_type=None, parameters={})]) def test_given_field_already_exists_value_is_overwritten(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["existing_field"], value="new_value"), + AddedFieldDefinition(path=["existing_field"], value="new_value", value_type=None, parameters={}), ] ) config = {"existing_field": "existing_value"} @@ -84,7 +84,7 @@ def test_given_field_already_exists_value_is_overwritten(): def test_with_condition_only_adds_fields_when_condition_is_met(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["conditional_field"], value="added_value"), + AddedFieldDefinition(path=["conditional_field"], value="added_value", value_type=None, parameters={}), ], condition="{{ config.flag == true }}", ) From 12043e524b1c047f7da6c201dc6919edc53eb3e8 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 14 May 2025 17:11:33 -0700 Subject: [PATCH 34/56] chore: format --- .../models/declarative_component_schema.py | 102 +++++------ .../parsers/model_to_component_factory.py | 19 +- .../sources/declarative/spec/test_spec.py | 169 ++++++++++-------- .../test_config_add_fields.py | 44 ++++- 4 files changed, 179 insertions(+), 155 deletions(-) diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index d716f6d86..6fc14aa52 100644 --- 
a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -618,9 +618,7 @@ class OAuthAuthenticator(BaseModel): scopes: Optional[List[str]] = Field( None, description="List of scopes that should be granted to the access token.", - examples=[ - ["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"] - ], + examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], title="Scopes", ) token_expiry_date: Optional[str] = Field( @@ -1126,28 +1124,24 @@ class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = ( - Field( - None, - description="OAuth specific blob. This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", - examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, - { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], - } - }, - ], - title="OAuth user input", - ) + oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( + None, + description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", + examples=[ + {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + { + "app_id": { + "type": "string", + "path_in_connector_config": ["info", "app_id"], + } + }, + ], + title="OAuth user input", ) - oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = ( - Field( - None, - description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', - title="DeclarativeOAuth Connector Specification", - ) + oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = Field( + None, + description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The 
variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', + title="DeclarativeOAuth Connector Specification", ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, @@ -1165,9 +1159,7 @@ class Config: complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[ - {"client_id": {"type": "string"}, "client_secret": {"type": "string"}} - ], + examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], title="OAuth input specification", ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( @@ -1964,9 +1956,7 @@ class RecordSelector(BaseModel): description="Responsible for filtering records to be emitted by the Source.", title="Record Filter", ) - schema_normalization: Optional[ - Union[SchemaNormalization, CustomSchemaNormalization] - ] = Field( + schema_normalization: Optional[Union[SchemaNormalization, CustomSchemaNormalization]] = Field( None, description="Responsible for normalization according to the schema.", title="Schema Normalization", @@ -2107,9 +2097,7 @@ class ConfigMigration(BaseModel): description: Optional[str] = Field( None, description="The description/purpose of the config migration." 
) - transformations: List[ - Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields] - ] = Field( + transformations: List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] = Field( ..., description="The list of transformations that will attempt to be applied on an incoming unmigrated config. The transformations will be applied in the order they are defined.", title="Transformations", @@ -2307,9 +2295,7 @@ class Config: extra = Extra.allow type: Literal["DeclarativeStream"] - name: Optional[str] = Field( - "", description="The stream name.", example=["Users"], title="Name" - ) + name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") retriever: Union[SimpleRetriever, AsyncRetriever, CustomRetriever] = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages.", @@ -2487,20 +2473,18 @@ class HttpRequester(BaseModelWithDeprecations): description="Allows for retrieving a dynamic set of properties from an API endpoint which can be injected into outbound request using the stream_partition.extra_fields.", title="Fetch Properties from Endpoint", ) - request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = ( - Field( - None, - description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", - examples=[ - {"unit": "day"}, - { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' - }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": "updated_at"}, - ], - title="Query Parameters", - ) + request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = Field( + None, + description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + examples=[ + {"unit": "day"}, + { + "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + }, + {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, + {"sort_by[asc]": "updated_at"}, + ], + title="Query Parameters", ) request_headers: Optional[Union[Dict[str, str], str]] = Field( None, @@ -2690,9 +2674,7 @@ class QueryProperties(BaseModel): class StateDelegatingStream(BaseModel): type: Literal["StateDelegatingStream"] - name: str = Field( - ..., description="The stream name.", example=["Users"], title="Name" - ) + name: str = Field(..., description="The stream name.", example=["Users"], title="Name") full_refresh_stream: DeclarativeStream = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages when the state is empty or not provided.", @@ -2781,9 +2763,7 @@ class AsyncRetriever(BaseModel): ) download_extractor: Optional[ Union[DpathExtractor, CustomRecordExtractor, ResponseToFileExtractor] - ] = Field( - None, description="Responsible for fetching the records from provided urls." 
- ) + ] = Field(None, description="Responsible for fetching the records from provided urls.") creation_requester: Union[HttpRequester, CustomRequester] = Field( ..., description="Requester component that describes how to prepare HTTP requests to send to the source API to create the async server-side job.", @@ -2923,12 +2903,10 @@ class DynamicDeclarativeStream(BaseModel): stream_template: DeclarativeStream = Field( ..., description="Reference to the stream template.", title="Stream Template" ) - components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = ( - Field( - ..., - description="Component resolve and populates stream templates with components values.", - title="Components Resolver", - ) + components_resolver: Union[HttpComponentsResolver, ConfigComponentsResolver] = Field( + ..., + description="Component resolve and populates stream templates with components values.", + title="Components Resolver", ) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index e87aa5f93..40576d601 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -820,8 +820,9 @@ def _collect_model_deprecations(self, model: BaseModelWithDeprecations) -> None: if log not in self._collected_deprecation_logs: self._collected_deprecation_logs.append(log) - - def create_config_migration(self, model: ConfigMigrationModel, config: Config) -> ConfigMigration: + def create_config_migration( + self, model: ConfigMigrationModel, config: Config + ) -> ConfigMigration: transformations = [] for transformation in model.transformations: transformations.append(self._create_component_from_model(transformation, config)) @@ -831,7 +832,9 @@ def create_config_migration(self, model: ConfigMigrationModel, config: Config) - transformations=transformations, ) - def create_config_add_fields(self, model: ConfigAddFieldsModel, config: Config) -> ConfigAddFields: + def create_config_add_fields( + self, model: ConfigAddFieldsModel, config: Config + ) -> ConfigAddFields: fields = [self._create_component_from_model(field, config) for field in model.fields] return ConfigAddFields( fields=fields, @@ -854,9 +857,7 @@ def create_config_remap_field(model: ConfigRemapFieldModel, config: Config) -> C config=config, ) - def create_dpath_validator(self, model: DpathValidatorModel, config: Config) -> DpathValidator: - strategy = self._create_component_from_model(model.validation_strategy, config) return DpathValidator( @@ -864,7 +865,9 @@ def create_dpath_validator(self, model: DpathValidatorModel, config: Config) -> strategy=strategy, ) - def create_predicate_validator(self, model: PredicateValidatorModel, config: Config) -> PredicateValidator: + def create_predicate_validator( + self, model: PredicateValidatorModel, config: Config + ) -> PredicateValidator: strategy = self._create_component_from_model(model.validation_strategy, config) return PredicateValidator( @@ -873,7 +876,9 @@ def create_predicate_validator(self, model: PredicateValidatorModel, config: Con ) @staticmethod - def create_validate_adheres_to_schema(model: ValidateAdheresToSchemaModel) -> ValidateAdheresToSchema: + def create_validate_adheres_to_schema( + model: ValidateAdheresToSchemaModel, + ) -> ValidateAdheresToSchema: base_schema = cast(Mapping[str, Any], model.base_schema) return ValidateAdheresToSchema( schema=base_schema, diff --git 
a/unit_tests/sources/declarative/spec/test_spec.py b/unit_tests/sources/declarative/spec/test_spec.py index 0824d41cd..0d7a33dea 100644 --- a/unit_tests/sources/declarative/spec/test_spec.py +++ b/unit_tests/sources/declarative/spec/test_spec.py @@ -160,6 +160,7 @@ def test_spec(spec, expected_connection_specification) -> None: assert spec.generate_spec() == expected_connection_specification + @pytest.fixture def migration_mocks(monkeypatch): mock_message_repository = Mock() @@ -168,7 +169,9 @@ def migration_mocks(monkeypatch): mock_source = Mock() mock_entrypoint = Mock() mock_entrypoint.extract_config.return_value = "/fake/config/path" - monkeypatch.setattr("airbyte_cdk.sources.declarative.spec.spec.AirbyteEntrypoint", lambda _: mock_entrypoint) + monkeypatch.setattr( + "airbyte_cdk.sources.declarative.spec.spec.AirbyteEntrypoint", lambda _: mock_entrypoint + ) _mock_open = mock_open() mock_json_dump = Mock() @@ -182,7 +185,10 @@ def migration_mocks(monkeypatch): monkeypatch.setattr("builtins.open", _mock_open) monkeypatch.setattr("json.dump", mock_json_dump) monkeypatch.setattr("builtins.print", mock_print) - monkeypatch.setattr("airbyte_cdk.models.airbyte_protocol_serializers.AirbyteMessageSerializer.dump", mock_serializer_dump) + monkeypatch.setattr( + "airbyte_cdk.models.airbyte_protocol_serializers.AirbyteMessageSerializer.dump", + mock_serializer_dump, + ) monkeypatch.setattr("airbyte_cdk.sources.declarative.spec.spec.orjson.dumps", mock_orjson_dumps) return { @@ -193,9 +199,10 @@ def migration_mocks(monkeypatch): "print": mock_print, "serializer_dump": mock_serializer_dump, "orjson_dumps": mock_orjson_dumps, - "decoded_bytes": mock_decoded_bytes + "decoded_bytes": mock_decoded_bytes, } + def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migration_mocks) -> None: input_config = {"planet": "CRSC"} @@ -207,13 +214,11 @@ def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migratio description="Test migration", transformations=[ ConfigRemapField( - map={"CRSC": "Coruscant"}, - field_path=["planet"], - config=input_config + map={"CRSC": "Coruscant"}, field_path=["planet"], config=input_config ) - ] + ], ) - ] + ], ) spec.message_repository = migration_mocks["message_repository"] @@ -239,13 +244,11 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m description="Test migration", transformations=[ ConfigRemapField( - map={"CRSC": "Coruscant"}, - field_path=["planet"], - config=input_config + map={"CRSC": "Coruscant"}, field_path=["planet"], config=input_config ) - ] + ], ) - ] + ], ) spec.message_repository = migration_mocks["message_repository"] @@ -259,58 +262,63 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m migration_mocks["orjson_dumps"].assert_not_called() migration_mocks["decoded_bytes"].decode.assert_not_called() + def test_given_list_of_transformations_when_transform_config_then_config_is_transformed() -> None: input_config = {"planet_code": "CRSC"} spec = component_spec( - connection_specification={}, - parameters={}, - config_transformations=[ - ConfigAddFields( - fields=[ - AddedFieldDefinition( - path=["planet_name"], - value="{{ config['planet_code'] }}", - value_type=None, - parameters={} - ), - AddedFieldDefinition( - path=["planet_population"], - value="{{ config['planet_code'] }}", - value_type=None, - parameters={} - ), - ] - ), - ConfigRemapField( - map={ - "CRSC": "Coruscant", - }, - field_path=["planet_name"], - config=input_config - ), - ConfigRemapField( - 
map={ - "CRSC": 3_000_000_000_000, - }, - field_path=["planet_population"], - config=input_config - ), - ConfigRemoveFields( - field_pointers=["planet_code"], - ) - ] - ) - assert spec.transform_config(input_config) == {"planet_name": "Coruscant", "planet_population": 3_000_000_000_000} + connection_specification={}, + parameters={}, + config_transformations=[ + ConfigAddFields( + fields=[ + AddedFieldDefinition( + path=["planet_name"], + value="{{ config['planet_code'] }}", + value_type=None, + parameters={}, + ), + AddedFieldDefinition( + path=["planet_population"], + value="{{ config['planet_code'] }}", + value_type=None, + parameters={}, + ), + ] + ), + ConfigRemapField( + map={ + "CRSC": "Coruscant", + }, + field_path=["planet_name"], + config=input_config, + ), + ConfigRemapField( + map={ + "CRSC": 3_000_000_000_000, + }, + field_path=["planet_population"], + config=input_config, + ), + ConfigRemoveFields( + field_pointers=["planet_code"], + ), + ], + ) + assert spec.transform_config(input_config) == { + "planet_name": "Coruscant", + "planet_population": 3_000_000_000_000, + } def test_given_valid_config_value_when_validating_then_no_exception_is_raised() -> None: spec = component_spec( - connection_specification={}, - parameters={}, - config_validations=[ - DpathValidator( - field_path=["test_field"], - strategy=ValidateAdheresToSchema(schema={ + connection_specification={}, + parameters={}, + config_validations=[ + DpathValidator( + field_path=["test_field"], + strategy=ValidateAdheresToSchema( + schema={ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Test Spec", "type": "object", @@ -323,34 +331,41 @@ def test_given_valid_config_value_when_validating_then_no_exception_is_raised() "description": "The name of the test spec", "airbyte_secret": False, } - } - }) - ) - ] - ) + }, + } + ), + ) + ], + ) input_config = {"test_field": {"field_to_validate": "test"}} spec.validate_config(input_config) + def test_given_invalid_config_value_when_validating_then_exception_is_raised() -> None: spec = component_spec( connection_specification={}, parameters={}, config_validations=[ - DpathValidator(field_path=["test_field"], strategy=ValidateAdheresToSchema(schema={ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Test Spec", - "type": "object", - "required": [], - "properties": { - "field_to_validate": { - "type": "string", - "title": "Name", - "description": "The name of the test spec", - "airbyte_secret": False, + DpathValidator( + field_path=["test_field"], + strategy=ValidateAdheresToSchema( + schema={ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Test Spec", + "type": "object", + "required": [], + "properties": { + "field_to_validate": { + "type": "string", + "title": "Name", + "description": "The name of the test spec", + "airbyte_secret": False, + } + }, } - } - })) - ] + ), + ) + ], ) input_config = {"test_field": {"field_to_validate": 123}} diff --git a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py index 1916adf2a..062c20b7e 100644 --- a/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py +++ b/unit_tests/sources/declarative/transformations/config_transformations/test_config_add_fields.py @@ -15,8 +15,12 @@ def test_given_valid_static_value_fields_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["new_field"], 
value="static_value", value_type=None, parameters={}), - AddedFieldDefinition(path=["another_field"], value="another_value", value_type=None, parameters={}), + AddedFieldDefinition( + path=["new_field"], value="static_value", value_type=None, parameters={} + ), + AddedFieldDefinition( + path=["another_field"], value="another_value", value_type=None, parameters={} + ), ] ) config = {} @@ -32,7 +36,12 @@ def test_given_valid_static_value_fields_added(): def test_given_valid_nested_fields_static_value_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["parent", "child", "grandchild"], value="nested_value", value_type=None, parameters={}), + AddedFieldDefinition( + path=["parent", "child", "grandchild"], + value="nested_value", + value_type=None, + parameters={}, + ), ] ) config = {} @@ -45,8 +54,15 @@ def test_given_valid_nested_fields_static_value_added(): def test_given_valid_interpolated_input_field_added(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["derived_field"], value="{{ config.original_field }}", value_type=None, parameters={}), - AddedFieldDefinition(path=["expression_result"], value="{{ 2 * 3 }}", value_type=None, parameters={}), + AddedFieldDefinition( + path=["derived_field"], + value="{{ config.original_field }}", + value_type=None, + parameters={}, + ), + AddedFieldDefinition( + path=["expression_result"], value="{{ 2 * 3 }}", value_type=None, parameters={} + ), ] ) config = {"original_field": "original_value"} @@ -62,16 +78,24 @@ def test_given_valid_interpolated_input_field_added(): def test_given_invalid_field_raises_exception(): with pytest.raises(ValueError): - AddFields(fields=[AddedFieldDefinition(path=[], value="value", value_type=None, parameters={})]) + AddFields( + fields=[AddedFieldDefinition(path=[], value="value", value_type=None, parameters={})] + ) with pytest.raises(ValueError): - AddFields(fields=[AddedFieldDefinition(path=["valid_path"], value=123, value_type=None, parameters={})]) + AddFields( + fields=[ + AddedFieldDefinition(path=["valid_path"], value=123, value_type=None, parameters={}) + ] + ) def test_given_field_already_exists_value_is_overwritten(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["existing_field"], value="new_value", value_type=None, parameters={}), + AddedFieldDefinition( + path=["existing_field"], value="new_value", value_type=None, parameters={} + ), ] ) config = {"existing_field": "existing_value"} @@ -84,7 +108,9 @@ def test_given_field_already_exists_value_is_overwritten(): def test_with_condition_only_adds_fields_when_condition_is_met(): transformation = AddFields( fields=[ - AddedFieldDefinition(path=["conditional_field"], value="added_value", value_type=None, parameters={}), + AddedFieldDefinition( + path=["conditional_field"], value="added_value", value_type=None, parameters={} + ), ], condition="{{ config.flag == true }}", ) From 9cc4d117e24a5b78d69a6056e6e78db6551c3e18 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 14 May 2025 17:20:30 -0700 Subject: [PATCH 35/56] remove errant dependencies --- poetry.lock | 352 ++++--------------------------------------------- pyproject.toml | 2 - 2 files changed, 25 insertions(+), 329 deletions(-) diff --git a/poetry.lock b/poetry.lock index 01a3ee57c..36199b737 100644 --- a/poetry.lock +++ b/poetry.lock @@ -170,26 +170,27 @@ files = [ [[package]] name = "anyio" -version = "3.7.1" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false 
-python-versions = ">=3.7" +optional = true +python-versions = ">=3.9" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "async-timeout" @@ -283,26 +284,6 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] -[[package]] -name = "beartype" -version = "0.20.2" -description = "Unbearably fast near-real-time hybrid runtime-static type-checking in pure Python." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "beartype-0.20.2-py3-none-any.whl", hash = "sha256:5171a91ecf01438a59884f0cde37d2d5da2c992198b53d6ba31db3940f47ff04"}, - {file = "beartype-0.20.2.tar.gz", hash = "sha256:38c60c065ad99364a8c767e8a0e71ba8263d467b91414ed5dcffb7758a2e8079"}, -] - -[package.extras] -dev = ["autoapi (>=0.9.0)", "click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test = ["click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] -test-tox = ["click", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "typing-extensions (>=3.10.0.0)", "xarray"] -test-tox-coverage = ["coverage (>=5.5)"] - [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -987,30 +968,6 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] -[[package]] -name = "dagger-io" -version = "0.18.6" -description = "A client package for running Dagger pipelines in Python." -optional = false -python-versions = ">=3.10" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "dagger_io-0.18.6-py3-none-any.whl", hash = "sha256:357e426ce42ec5b27da87a9e3145e6b5ba5fd02dbbad317ecc1ff09c1e1a44cd"}, - {file = "dagger_io-0.18.6.tar.gz", hash = "sha256:165c1c16165feb12cfdd89fce50c898e6ebc5a9acaaa0b725569317c404e45ce"}, -] - -[package.dependencies] -anyio = ">=3.6.2" -beartype = ">=0.18.2" -cattrs = ">=24.1.0" -gql = {version = ">=3.5.0", extras = ["httpx"]} -opentelemetry-exporter-otlp-proto-http = ">=1.23.0" -opentelemetry-sdk = ">=1.23.0" -platformdirs = ">=2.6.2" -rich = ">=10.11.0" -typing-extensions = ">=4.13.0" - [[package]] name = "dataclasses-json" version = "0.6.7" @@ -1028,25 +985,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "deprecated" -version = "1.2.18" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, - {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] - [[package]] name = "deptry" version = "0.23.0" @@ -1537,50 +1475,6 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0)"] -[[package]] -name = "gql" -version = "3.5.2" -description = "GraphQL client for Python" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "gql-3.5.2-py2.py3-none-any.whl", hash = "sha256:c830ffc38b3997b2a146317b27758305ab3d0da3bde607b49f34e32affb23ba2"}, - {file = "gql-3.5.2.tar.gz", hash = "sha256:07e1325b820c8ba9478e95de27ce9f23250486e7e79113dbb7659a442dc13e74"}, -] - -[package.dependencies] -anyio = ">=3.0,<5" -backoff = ">=1.11.1,<3.0" -graphql-core = ">=3.2,<3.2.5" -httpx = {version = ">=0.23.1,<1", optional = true, markers = "extra == \"httpx\""} -yarl = ">=1.6,<2.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] -all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] -botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] -httpx = ["httpx (>=0.23.1,<1)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] -test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] -test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)"] -websockets = ["websockets (>=10,<12)"] - -[[package]] -name = "graphql-core" -version = "3.2.4" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = false -python-versions = "<4,>=3.6" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"}, - {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, -] - [[package]] name = "greenlet" version = "3.1.1" @@ -1732,7 +1626,7 @@ protobuf = ">=4.21.6" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false +optional = true python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1745,7 +1639,7 @@ files = [ name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1768,7 +1662,7 @@ trio = ["trio (>=0.22.0,<1.0)"] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1810,10 +1704,10 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, @@ -2690,10 +2584,10 @@ test = ["Cython", "greenlet", "ipython", "packaging", "pytest", "pytest-cov", "p name = "multidict" version = "6.1.0" description = "multidict implementation" -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2989,112 +2883,6 @@ files = [ [package.dependencies] et-xmlfile = "*" -[[package]] -name = "opentelemetry-api" -version = "1.32.1" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_api-1.32.1-py3-none-any.whl", hash = "sha256:bbd19f14ab9f15f0e85e43e6a958aa4cb1f36870ee62b7fd205783a112012724"}, - {file = "opentelemetry_api-1.32.1.tar.gz", hash = "sha256:a5be71591694a4d9195caf6776b055aa702e964d961051a0715d05f8632c32fb"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<8.7.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.32.1" -description 
= "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.32.1-py3-none-any.whl", hash = "sha256:a1e9ad3d0d9a9405c7ff8cdb54ba9b265da16da9844fe36b8c9661114b56c5d9"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.32.1.tar.gz", hash = "sha256:da4edee4f24aaef109bfe924efad3a98a2e27c91278115505b298ee61da5d68e"}, -] - -[package.dependencies] -opentelemetry-proto = "1.32.1" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.32.1" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.32.1-py3-none-any.whl", hash = "sha256:3cc048b0c295aa2cbafb883feaf217c7525b396567eeeabb5459affb08b7fefe"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.32.1.tar.gz", hash = "sha256:f854a6e7128858213850dbf1929478a802faf50e799ffd2eb4d7424390023828"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.32.1" -opentelemetry-proto = "1.32.1" -opentelemetry-sdk = ">=1.32.1,<1.33.0" -requests = ">=2.7,<3.0" - -[[package]] -name = "opentelemetry-proto" -version = "1.32.1" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_proto-1.32.1-py3-none-any.whl", hash = "sha256:fe56df31033ab0c40af7525f8bf4c487313377bbcfdf94184b701a8ccebc800e"}, - {file = "opentelemetry_proto-1.32.1.tar.gz", hash = "sha256:bc6385ccf87768f029371535312071a2d09e6c9ebf119ac17dbc825a6a56ba53"}, -] - -[package.dependencies] -protobuf = ">=5.0,<6.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.32.1" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_sdk-1.32.1-py3-none-any.whl", hash = "sha256:bba37b70a08038613247bc42beee5a81b0ddca422c7d7f1b097b32bf1c7e2f17"}, - {file = "opentelemetry_sdk-1.32.1.tar.gz", hash = "sha256:8ef373d490961848f525255a42b193430a0637e064dd132fd2a014d94792a092"}, -] - -[package.dependencies] -opentelemetry-api = "1.32.1" -opentelemetry-semantic-conventions = "0.53b1" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.53b1" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_semantic_conventions-0.53b1-py3-none-any.whl", hash = "sha256:21df3ed13f035f8f3ea42d07cbebae37020367a53b47f1ebee3b10a381a00208"}, - {file = "opentelemetry_semantic_conventions-0.53b1.tar.gz", hash = "sha256:4c5a6fede9de61211b2e9fc1e02e8acacce882204cd770177342b6a3be682992"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -opentelemetry-api = "1.32.1" - [[package]] name = "orjson" version = "3.10.15" @@ -3525,10 +3313,10 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "propcache" version = "0.2.1" description = "Accelerated property cache" -optional = false +optional = 
true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -5149,7 +4937,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -optional = false +optional = true python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -5848,96 +5636,6 @@ files = [ [package.dependencies] tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} -[[package]] -name = "wrapt" -version = "1.17.2" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, - {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, - {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, - {file = 
"wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, - {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, - {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, - {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, - {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, - {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, - {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, - {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, - {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, - {file = 
"wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, - {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, - {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, - {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, - {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, - {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, - {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, -] - [[package]] name = "xlsxwriter" version = "3.2.0" @@ -5968,10 +5666,10 @@ files = [ name = "yarl" version = "1.18.3" description = "Yet another URL library" -optional = false +optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -6066,10 +5764,10 
@@ propcache = ">=0.2.0" name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false +optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -6092,4 +5790,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "cde25949898eaef637105fd376a030b96b30b197e819dff2d015e03479c3b4ac" +content-hash = "3462b02e03e9f7edd9dec23acde60ad4f7128ace08ce1787556aa0b4bcd25a86" diff --git a/pyproject.toml b/pyproject.toml index 24d4696ab..74d2f6d56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,8 +90,6 @@ sqlalchemy = {version = "^2.0,!=2.0.36", optional = true } xmltodict = ">=0.13,<0.15" anyascii = "^0.3.2" whenever = "^0.6.16" -dagger-io = "^0.18.6" -anyio = "<4.0.0" [tool.poetry.group.dev.dependencies] freezegun = "*" From 1d908a47a2f2a326c99ff257a0a69204bd1965fb Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 15 May 2025 08:48:40 -0700 Subject: [PATCH 36/56] fix errors --- .../declarative/declarative_component_schema.yaml | 4 ++-- .../models/declarative_component_schema.py | 5 ++++- .../parsers/model_to_component_factory.py | 12 ++++++++---- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 2ff6cf765..1758b28ca 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -3806,7 +3806,7 @@ definitions: config_normalization_rules: title: Config Normalization Rules type: object - additional_properties: false + additionalProperties: false properties: config_migrations: title: Config Migrations @@ -4340,7 +4340,7 @@ definitions: properties: type: type: string - enum: [RemapField] + enum: [ConfigRemapField] map: title: Value Mapping description: A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value. diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 6fc14aa52..cd5b97809 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1549,7 +1549,7 @@ class ValidateAdheresToSchema(BaseModel): class ConfigRemapField(BaseModel): - type: Literal["RemapField"] + type: Literal["ConfigRemapField"] map: Union[Dict[str, Any], str] = Field( ..., description="A mapping of original values to new values. When a field value matches a key in this map, it will be replaced with the corresponding value.", @@ -2105,6 +2105,9 @@ class ConfigMigration(BaseModel): class ConfigNormalizationRules(BaseModel): + class Config: + extra = Extra.forbid + config_migrations: Optional[List[ConfigMigration]] = Field( None, description="The discrete migrations that will be applied on the incoming config. 
Each migration will be applied in the order they are defined.", diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 40576d601..0ee00ff68 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -833,7 +833,7 @@ def create_config_migration( ) def create_config_add_fields( - self, model: ConfigAddFieldsModel, config: Config + self, model: ConfigAddFieldsModel, config: Config, **kwargs: Any ) -> ConfigAddFields: fields = [self._create_component_from_model(field, config) for field in model.fields] return ConfigAddFields( @@ -842,14 +842,18 @@ def create_config_add_fields( ) @staticmethod - def create_config_remove_fields(model: ConfigRemoveFieldsModel) -> ConfigRemoveFields: + def create_config_remove_fields( + model: ConfigRemoveFieldsModel, config: Config, **kwargs: Any + ) -> ConfigRemoveFields: return ConfigRemoveFields( field_pointers=model.field_pointers, condition=model.condition or "", ) @staticmethod - def create_config_remap_field(model: ConfigRemapFieldModel, config: Config) -> ConfigRemapField: + def create_config_remap_field( + model: ConfigRemapFieldModel, config: Config, **kwargs: Any + ) -> ConfigRemapField: mapping = cast(Mapping[str, Any], model.map) return ConfigRemapField( map=mapping, @@ -877,7 +881,7 @@ def create_predicate_validator( @staticmethod def create_validate_adheres_to_schema( - model: ValidateAdheresToSchemaModel, + model: ValidateAdheresToSchemaModel, config: Config, **kwargs: Any ) -> ValidateAdheresToSchema: base_schema = cast(Mapping[str, Any], model.base_schema) return ValidateAdheresToSchema( From 5f3f0c104f2eba7507a6e62adba9cc4f026004eb Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 15 May 2025 09:07:07 -0700 Subject: [PATCH 37/56] revert erroneous AI deletion --- .../sources/declarative/declarative_component_schema.yaml | 2 +- .../sources/declarative/models/declarative_component_schema.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 1758b28ca..e06eb4892 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -168,7 +168,7 @@ definitions: - X-Auth-Token inject_into: title: Inject API Key Into Outgoing HTTP Request - description: Configure how the API Key will be sent in requests to the source API. + description: Configure how the API Key will be sent in requests to the source API. Either inject_into or header has to be defined. "$ref": "#/definitions/RequestOption" examples: - inject_into: header diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index cd5b97809..abe068294 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1654,7 +1654,7 @@ class ApiKeyAuthenticator(BaseModel): ) inject_into: Optional[RequestOption] = Field( None, - description="Configure how the API Key will be sent in requests to the source API.", + description="Configure how the API Key will be sent in requests to the source API. 
Either inject_into or header has to be defined.", examples=[ {"inject_into": "header", "field_name": "Authorization"}, {"inject_into": "request_parameter", "field_name": "authKey"}, From ded5fcbaf4e36c0caf996621ac0324d7b8133c5f Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 15 May 2025 09:08:37 -0700 Subject: [PATCH 38/56] revert erroneous AI deletion --- .../sources/declarative/declarative_component_schema.yaml | 2 +- .../sources/declarative/models/declarative_component_schema.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 1758b28ca..e06eb4892 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -168,7 +168,7 @@ definitions: - X-Auth-Token inject_into: title: Inject API Key Into Outgoing HTTP Request - description: Configure how the API Key will be sent in requests to the source API. + description: Configure how the API Key will be sent in requests to the source API. Either inject_into or header has to be defined. "$ref": "#/definitions/RequestOption" examples: - inject_into: header diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index cd5b97809..abe068294 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1654,7 +1654,7 @@ class ApiKeyAuthenticator(BaseModel): ) inject_into: Optional[RequestOption] = Field( None, - description="Configure how the API Key will be sent in requests to the source API.", + description="Configure how the API Key will be sent in requests to the source API. Either inject_into or header has to be defined.", examples=[ {"inject_into": "header", "field_name": "Authorization"}, {"inject_into": "request_parameter", "field_name": "authKey"}, From 398349560c21252b29aa21a9395ede1e7b8cf6df Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 19 May 2025 14:17:34 -0700 Subject: [PATCH 39/56] update per comments --- .../declarative_component_schema.yaml | 9 +- .../models/declarative_component_schema.py | 11 +- .../parsers/model_to_component_factory.py | 63 ++-- airbyte_cdk/sources/declarative/spec/spec.py | 9 +- poetry.lock | 335 +++++++++++++++++- pyproject.toml | 1 + 6 files changed, 378 insertions(+), 50 deletions(-) diff --git a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index e06eb4892..74d77daf5 100644 --- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -3814,6 +3814,7 @@ definitions: type: array items: "$ref": "#/definitions/ConfigMigration" + default: [] transformations: title: Transformations description: The list of transformations that will be applied on the incoming config at the start of each sync. The transformations will be applied in the order they are defined. @@ -3823,6 +3824,7 @@ definitions: - "$ref": "#/definitions/ConfigRemapField" - "$ref": "#/definitions/ConfigAddFields" - "$ref": "#/definitions/ConfigRemoveFields" + default: [] validations: title: Validations description: The list of validations that will be performed on the incoming config at the start of each sync. 
@@ -3831,13 +3833,18 @@ definitions: anyOf: - "$ref": "#/definitions/DpathValidator" - "$ref": "#/definitions/PredicateValidator" + default: [] ConfigMigration: title: Config Migration description: A config migration that will be applied on the incoming config at the start of a sync. type: object required: + - type - transformations properties: + type: + type: string + enum: [ConfigMigration] description: type: string description: The description/purpose of the config migration. @@ -4238,7 +4245,7 @@ definitions: - ["data", "{{ parameters.name }}"] - ["data", "*", "record"] validation_strategy: - title: Validation Stragey + title: Validation Strategy description: The condition that the specified config value will be evaluated against anyOf: - "$ref": "#/definitions/ValidateAdheresToSchema" diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index abe068294..3976f4185 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1,3 +1,5 @@ +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. + # generated by datamodel-codegen: # filename: declarative_component_schema.yaml @@ -1995,7 +1997,7 @@ class DpathValidator(BaseModel): validation_strategy: ValidateAdheresToSchema = Field( ..., description="The condition that the specified config value will be evaluated against", - title="Validation Stragey", + title="Validation Strategy", ) @@ -2094,6 +2096,7 @@ class Config: class ConfigMigration(BaseModel): + type: Literal["ConfigMigration"] description: Optional[str] = Field( None, description="The description/purpose of the config migration." ) @@ -2109,19 +2112,19 @@ class Config: extra = Extra.forbid config_migrations: Optional[List[ConfigMigration]] = Field( - None, + [], description="The discrete migrations that will be applied on the incoming config. Each migration will be applied in the order they are defined.", title="Config Migrations", ) transformations: Optional[ List[Union[ConfigRemapField, ConfigAddFields, ConfigRemoveFields]] ] = Field( - None, + [], description="The list of transformations that will be applied on the incoming config at the start of each sync. 
The transformations will be applied in the order they are defined.", title="Transformations", ) validations: Optional[List[Union[DpathValidator, PredicateValidator]]] = Field( - None, + [], description="The list of validations that will be performed on the incoming config at the start of each sync.", title="Validations", ) diff --git a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index a1a0b9509..e8fa5aa9a 100644 --- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -499,7 +499,7 @@ RequestOptionsProvider, ) from airbyte_cdk.sources.declarative.requesters.request_path import RequestPath -from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod +from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod, Requester from airbyte_cdk.sources.declarative.resolvers import ( ComponentMappingDefinition, ConfigComponentsResolver, @@ -542,6 +542,9 @@ ConfigRemapField, ConfigRemoveFields, ) +from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( + ConfigTransformation, +) from airbyte_cdk.sources.declarative.transformations.dpath_flatten_fields import ( DpathFlattenFields, KeyTransformation, @@ -823,9 +826,10 @@ def _collect_model_deprecations(self, model: BaseModelWithDeprecations) -> None: def create_config_migration( self, model: ConfigMigrationModel, config: Config ) -> ConfigMigration: - transformations = [] - for transformation in model.transformations: - transformations.append(self._create_component_from_model(transformation, config)) + transformations: List[ConfigTransformation] = [ + self._create_component_from_model(transformation, config) + for transformation in model.transformations + ] return ConfigMigration( description=model.description, @@ -3604,24 +3608,39 @@ def _get_job_timeout() -> datetime.timedelta: ) def create_spec(self, model: SpecModel, config: Config, **kwargs: Any) -> Spec: - config_migrations = [] - config_transformations = [] - config_validations = [] - - if model.config_normalization_rules: - if model.config_normalization_rules.config_migrations: - for migration in model.config_normalization_rules.config_migrations: - config_migrations.append(self._create_component_from_model(migration, config)) - - if model.config_normalization_rules.transformations: - for transformation in model.config_normalization_rules.transformations: - config_transformations.append( - self._create_component_from_model(transformation, config) - ) - - if model.config_normalization_rules.validations: - for validation in model.config_normalization_rules.validations: - config_validations.append(self._create_component_from_model(validation, config)) + config_migrations = [ + self._create_component_from_model(migration, config) + for migration in ( + model.config_normalization_rules.config_migrations + if ( + model.config_normalization_rules + and model.config_normalization_rules.config_migrations + ) + else [] + ) + ] + config_transformations = [ + self._create_component_from_model(transformation, config) + for transformation in ( + model.config_normalization_rules.transformations + if ( + model.config_normalization_rules + and model.config_normalization_rules.transformations + ) + else [] + ) + ] + config_validations = [ + self._create_component_from_model(validation, config) + for validation in ( + 
model.config_normalization_rules.validations + if ( + model.config_normalization_rules + and model.config_normalization_rules.validations + ) + else [] + ) + ] return Spec( connection_specification=model.connection_specification, diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 8d00560e4..945289686 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -98,19 +98,14 @@ def migrate_config( for message in self.message_repository.consume_queue(): print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) - def transform_config(self, config: MutableMapping[str, Any]) -> MutableMapping[str, Any]: + def transform_config(self, config: MutableMapping[str, Any]) -> None: """ Apply all config transformations to the provided config. :param config: The user-provided configuration - :return: The transformed configuration """ - mutable_config = dict(config) - for transformation in self.config_transformations: - transformation.transform(mutable_config) - - return mutable_config + transformation.transform(config) def validate_config(self, config: MutableMapping[str, Any]) -> None: """ diff --git a/poetry.lock b/poetry.lock index 36199b737..5456d5387 100644 --- a/poetry.lock +++ b/poetry.lock @@ -172,7 +172,7 @@ files = [ name = "anyio" version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -284,6 +284,26 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "beartype" +version = "0.20.2" +description = "Unbearably fast near-real-time hybrid runtime-static type-checking in pure Python." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "beartype-0.20.2-py3-none-any.whl", hash = "sha256:5171a91ecf01438a59884f0cde37d2d5da2c992198b53d6ba31db3940f47ff04"}, + {file = "beartype-0.20.2.tar.gz", hash = "sha256:38c60c065ad99364a8c767e8a0e71ba8263d467b91414ed5dcffb7758a2e8079"}, +] + +[package.extras] +dev = ["autoapi (>=0.9.0)", "click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test = ["click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] +test-tox = ["click", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "typing-extensions (>=3.10.0.0)", "xarray"] +test-tox-coverage = ["coverage (>=5.5)"] + [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -968,6 +988,30 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "dagger-io" +version = "0.18.8" +description = "A client package for running Dagger pipelines in Python." +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "dagger_io-0.18.8-py3-none-any.whl", hash = "sha256:2093a3a7a8b5e8d3a2851fca39d55ac97409ed8f556e66294fe1976afedcf207"}, + {file = "dagger_io-0.18.8.tar.gz", hash = "sha256:09b8ac5efdcf25e9dd3be504953032b3b4e914116e8ae817053ae30412ced57e"}, +] + +[package.dependencies] +anyio = ">=3.6.2" +beartype = ">=0.18.2" +cattrs = ">=24.1.0" +gql = {version = ">=3.5.0", extras = ["httpx"]} +opentelemetry-exporter-otlp-proto-http = ">=1.23.0" +opentelemetry-sdk = ">=1.23.0" +platformdirs = ">=2.6.2" +rich = ">=10.11.0" +typing-extensions = ">=4.13.0" + [[package]] name = "dataclasses-json" version = "0.6.7" @@ -985,6 +1029,25 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] + [[package]] name = "deptry" version = "0.23.0" @@ -1475,6 +1538,50 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0)"] +[[package]] +name = "gql" +version = "3.5.2" +description = "GraphQL client for Python" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "gql-3.5.2-py2.py3-none-any.whl", hash = "sha256:c830ffc38b3997b2a146317b27758305ab3d0da3bde607b49f34e32affb23ba2"}, + {file = "gql-3.5.2.tar.gz", hash = "sha256:07e1325b820c8ba9478e95de27ce9f23250486e7e79113dbb7659a442dc13e74"}, +] + +[package.dependencies] +anyio = ">=3.0,<5" +backoff = ">=1.11.1,<3.0" +graphql-core = ">=3.2,<3.2.5" +httpx = {version = ">=0.23.1,<1", optional = true, markers = "extra == \"httpx\""} +yarl = ">=1.6,<2.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] +all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] +botocore = ["botocore (>=1.21,<2)"] +dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] +httpx = ["httpx (>=0.23.1,<1)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] +test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] +test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)"] +websockets = ["websockets (>=10,<12)"] + +[[package]] +name = "graphql-core" +version = "3.2.4" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"}, + {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, +] + [[package]] name = "greenlet" version = "3.1.1" @@ -1626,7 +1733,7 @@ protobuf = ">=4.21.6" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = true +optional = false python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1639,7 +1746,7 @@ files = [ name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1662,7 +1769,7 @@ trio = ["trio (>=0.22.0,<1.0)"] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1704,10 +1811,10 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, @@ -2584,10 +2691,10 @@ test = ["Cython", "greenlet", "ipython", "packaging", "pytest", "pytest-cov", "p name = "multidict" version = "6.1.0" description = "multidict implementation" -optional = true +optional = false python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2883,6 +2990,112 @@ files = [ [package.dependencies] et-xmlfile = "*" +[[package]] +name = "opentelemetry-api" +version = "1.33.1" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83"}, + {file = "opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<8.7.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.33.1" +description 
= "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131"}, +] + +[package.dependencies] +opentelemetry-proto = "1.33.1" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.33.1" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.33.1" +opentelemetry-proto = "1.33.1" +opentelemetry-sdk = ">=1.33.1,<1.34.0" +requests = ">=2.7,<3.0" + +[[package]] +name = "opentelemetry-proto" +version = "1.33.1" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70"}, + {file = "opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68"}, +] + +[package.dependencies] +protobuf = ">=5.0,<6.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.33.1" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112"}, + {file = "opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531"}, +] + +[package.dependencies] +opentelemetry-api = "1.33.1" +opentelemetry-semantic-conventions = "0.54b1" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.54b1" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d"}, + {file = "opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.33.1" + [[package]] name = "orjson" version = "3.10.15" @@ -3313,10 +3526,10 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "propcache" version = "0.2.1" description = "Accelerated property cache" -optional = true +optional = 
false python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -4937,7 +5150,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -optional = true +optional = false python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -5636,6 +5849,96 @@ files = [ [package.dependencies] tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} +[[package]] +name = "wrapt" +version = "1.17.2" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = 
"wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = 
"wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, +] + [[package]] name = "xlsxwriter" version = "3.2.0" @@ -5666,10 +5969,10 @@ files = [ name = "yarl" version = "1.18.3" description = "Yet another URL library" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -5764,10 +6067,10 
@@ propcache = ">=0.2.0" name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" -optional = true +optional = false python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -5790,4 +6093,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "3462b02e03e9f7edd9dec23acde60ad4f7128ace08ce1787556aa0b4bcd25a86" +content-hash = "3bec06acc91f12e14b86b1ae4e863169a45234c38fcc5d24ce9a89d6e10301cf" diff --git a/pyproject.toml b/pyproject.toml index 938145e95..f0c6ef6af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,7 @@ sqlalchemy = {version = "^2.0,!=2.0.36", optional = true } xmltodict = ">=0.13,<0.15" anyascii = "^0.3.2" whenever = "^0.6.16" +dagger-io = "^0.18.8" [tool.poetry.group.dev.dependencies] freezegun = "*" From 2b684e79fa1060d05e1d2fb881cfca6db5bed527 Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 19 May 2025 14:22:55 -0700 Subject: [PATCH 40/56] remove dagger-io --- poetry.lock | 335 +++---------------------------------------------- pyproject.toml | 1 - 2 files changed, 16 insertions(+), 320 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5456d5387..36199b737 100644 --- a/poetry.lock +++ b/poetry.lock @@ -172,7 +172,7 @@ files = [ name = "anyio" version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false +optional = true python-versions = ">=3.9" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -284,26 +284,6 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] -[[package]] -name = "beartype" -version = "0.20.2" -description = "Unbearably fast near-real-time hybrid runtime-static type-checking in pure Python." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "beartype-0.20.2-py3-none-any.whl", hash = "sha256:5171a91ecf01438a59884f0cde37d2d5da2c992198b53d6ba31db3940f47ff04"}, - {file = "beartype-0.20.2.tar.gz", hash = "sha256:38c60c065ad99364a8c767e8a0e71ba8263d467b91414ed5dcffb7758a2e8079"}, -] - -[package.extras] -dev = ["autoapi (>=0.9.0)", "click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test = ["click", "coverage (>=5.5)", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)", "xarray"] -test-tox = ["click", "equinox", "jax[cpu]", "jaxtyping", "langchain", "mypy (>=0.800)", "nuitka (>=1.2.6)", "numba", "numpy", "pandera", "pygments", "pyright (>=1.1.370)", "pytest (>=4.0.0)", "rich-click", "sphinx", "typing-extensions (>=3.10.0.0)", "xarray"] -test-tox-coverage = ["coverage (>=5.5)"] - [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -988,30 +968,6 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] -[[package]] -name = "dagger-io" -version = "0.18.8" -description = "A client package for running Dagger pipelines in Python." -optional = false -python-versions = ">=3.10" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "dagger_io-0.18.8-py3-none-any.whl", hash = "sha256:2093a3a7a8b5e8d3a2851fca39d55ac97409ed8f556e66294fe1976afedcf207"}, - {file = "dagger_io-0.18.8.tar.gz", hash = "sha256:09b8ac5efdcf25e9dd3be504953032b3b4e914116e8ae817053ae30412ced57e"}, -] - -[package.dependencies] -anyio = ">=3.6.2" -beartype = ">=0.18.2" -cattrs = ">=24.1.0" -gql = {version = ">=3.5.0", extras = ["httpx"]} -opentelemetry-exporter-otlp-proto-http = ">=1.23.0" -opentelemetry-sdk = ">=1.23.0" -platformdirs = ">=2.6.2" -rich = ">=10.11.0" -typing-extensions = ">=4.13.0" - [[package]] name = "dataclasses-json" version = "0.6.7" @@ -1029,25 +985,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "deprecated" -version = "1.2.18" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, - {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] - [[package]] name = "deptry" version = "0.23.0" @@ -1538,50 +1475,6 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0)"] -[[package]] -name = "gql" -version = "3.5.2" -description = "GraphQL client for Python" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "gql-3.5.2-py2.py3-none-any.whl", hash = "sha256:c830ffc38b3997b2a146317b27758305ab3d0da3bde607b49f34e32affb23ba2"}, - {file = "gql-3.5.2.tar.gz", hash = "sha256:07e1325b820c8ba9478e95de27ce9f23250486e7e79113dbb7659a442dc13e74"}, -] - -[package.dependencies] -anyio = ">=3.0,<5" -backoff = ">=1.11.1,<3.0" -graphql-core = ">=3.2,<3.2.5" -httpx = {version = ">=0.23.1,<1", optional = true, markers = "extra == \"httpx\""} -yarl = ">=1.6,<2.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] -all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] -botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] -httpx = ["httpx (>=0.23.1,<1)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] -test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)", "websockets (>=10,<12)"] -test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)", "vcrpy (==7.0.0)"] -websockets = ["websockets (>=10,<12)"] - -[[package]] -name = "graphql-core" -version = "3.2.4" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = false -python-versions = "<4,>=3.6" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"}, - {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, -] - [[package]] name = "greenlet" version = "3.1.1" @@ -1733,7 +1626,7 @@ protobuf = ">=4.21.6" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false +optional = true python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1746,7 +1639,7 @@ files = [ name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1769,7 +1662,7 @@ trio = ["trio (>=0.22.0,<1.0)"] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -1811,10 +1704,10 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, @@ -2691,10 +2584,10 @@ test = ["Cython", "greenlet", "ipython", "packaging", "pytest", "pytest-cov", "p name = "multidict" version = "6.1.0" description = "multidict implementation" -optional = false +optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2990,112 +2883,6 @@ files = [ [package.dependencies] et-xmlfile = "*" -[[package]] -name = "opentelemetry-api" -version = "1.33.1" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83"}, - {file = "opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<8.7.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.33.1" -description 
= "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131"}, -] - -[package.dependencies] -opentelemetry-proto = "1.33.1" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.33.1" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.33.1" -opentelemetry-proto = "1.33.1" -opentelemetry-sdk = ">=1.33.1,<1.34.0" -requests = ">=2.7,<3.0" - -[[package]] -name = "opentelemetry-proto" -version = "1.33.1" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70"}, - {file = "opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68"}, -] - -[package.dependencies] -protobuf = ">=5.0,<6.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.33.1" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112"}, - {file = "opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531"}, -] - -[package.dependencies] -opentelemetry-api = "1.33.1" -opentelemetry-semantic-conventions = "0.54b1" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.54b1" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d"}, - {file = "opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -opentelemetry-api = "1.33.1" - [[package]] name = "orjson" version = "3.10.15" @@ -3526,10 +3313,10 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "propcache" version = "0.2.1" description = "Accelerated property cache" -optional = false +optional = 
true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -5150,7 +4937,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -optional = false +optional = true python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" @@ -5849,96 +5636,6 @@ files = [ [package.dependencies] tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} -[[package]] -name = "wrapt" -version = "1.17.2" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, - {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, - {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, - {file = 
"wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, - {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, - {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, - {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, - {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, - {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, - {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, - {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, - {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, - {file = 
"wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, - {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, - {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, - {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, - {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, - {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, - {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, -] - [[package]] name = "xlsxwriter" version = "3.2.0" @@ -5969,10 +5666,10 @@ files = [ name = "yarl" version = "1.18.3" description = "Yet another URL library" -optional = false +optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -6067,10 +5764,10 
@@ propcache = ">=0.2.0" name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false +optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vector-db-based\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -6093,4 +5790,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "3bec06acc91f12e14b86b1ae4e863169a45234c38fcc5d24ce9a89d6e10301cf" +content-hash = "3462b02e03e9f7edd9dec23acde60ad4f7128ace08ce1787556aa0b4bcd25a86" diff --git a/pyproject.toml b/pyproject.toml index f0c6ef6af..938145e95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,6 @@ sqlalchemy = {version = "^2.0,!=2.0.36", optional = true } xmltodict = ">=0.13,<0.15" anyascii = "^0.3.2" whenever = "^0.6.16" -dagger-io = "^0.18.8" [tool.poetry.group.dev.dependencies] freezegun = "*" From 07c96e0d7a9e46c6946dbdb79e477f4c1586103f Mon Sep 17 00:00:00 2001 From: pnilan Date: Mon, 19 May 2025 14:39:29 -0700 Subject: [PATCH 41/56] fix test --- unit_tests/sources/declarative/spec/test_spec.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/unit_tests/sources/declarative/spec/test_spec.py b/unit_tests/sources/declarative/spec/test_spec.py index 0d7a33dea..e25e692a9 100644 --- a/unit_tests/sources/declarative/spec/test_spec.py +++ b/unit_tests/sources/declarative/spec/test_spec.py @@ -265,6 +265,10 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m def test_given_list_of_transformations_when_transform_config_then_config_is_transformed() -> None: input_config = {"planet_code": "CRSC"} + expected_config = { + "planet_name": "Coruscant", + "planet_population": 3_000_000_000_000, + } spec = component_spec( connection_specification={}, parameters={}, @@ -304,10 +308,9 @@ def test_given_list_of_transformations_when_transform_config_then_config_is_tran ), ], ) - assert spec.transform_config(input_config) == { - "planet_name": "Coruscant", - "planet_population": 3_000_000_000_000, - } + spec.transform_config(input_config) + + assert input_config == expected_config def test_given_valid_config_value_when_validating_then_no_exception_is_raised() -> None: From 2f6bb13f8dbaa482c270a51ef08c554c68c17519 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 20 May 2025 08:33:18 -0700 Subject: [PATCH 42/56] add config migration/transformation/validation execution --- airbyte_cdk/entrypoint.py | 7 +++++ .../manifest_declarative_source.py | 29 +++++++++++++------ airbyte_cdk/sources/declarative/spec/spec.py | 7 ++--- airbyte_cdk/sources/source.py | 8 ++++- 4 files changed, 36 insertions(+), 15 deletions(-) diff --git a/airbyte_cdk/entrypoint.py b/airbyte_cdk/entrypoint.py index 76a1be32e..b6885aaff 100644 --- a/airbyte_cdk/entrypoint.py +++ b/airbyte_cdk/entrypoint.py @@ -166,6 +166,7 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: self.logger.setLevel(logging.INFO) source_spec: ConnectorSpecification = self.source.spec(self.logger) + try: with tempfile.TemporaryDirectory( # Cleanup can fail on Windows due to file locks. 
Ignore if so, @@ -185,6 +186,12 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: else: raw_config = self.source.read_config(parsed_args.config) config = self.source.configure(raw_config, temp_dir) + mutable_config = dict(config) + config_path = self.extract_config(sys.argv[1:]) + if config_path: + self.source.migrate_config(config_path, mutable_config) + self.source.transform_config(mutable_config) + config = mutable_config yield from [ self.airbyte_message_to_string(queued_message) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index c98372be7..1cd557119 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -8,7 +8,7 @@ from copy import deepcopy from importlib import metadata from types import ModuleType -from typing import Any, Dict, Iterator, List, Mapping, Optional, Set +from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set, cast import yaml from jsonschema.exceptions import ValidationError @@ -57,6 +57,7 @@ ModelToComponentFactory, ) from airbyte_cdk.sources.declarative.resolvers import COMPONENTS_RESOLVER_TYPE_MAPPING +from airbyte_cdk.sources.declarative.spec.spec import Spec from airbyte_cdk.sources.message import MessageRepository from airbyte_cdk.sources.streams.core import Stream from airbyte_cdk.sources.types import ConnectionDefinition @@ -139,6 +140,12 @@ def __init__( # apply additional post-processing to the manifest self._post_process_manifest() + self._spec_component: Optional[Spec] = None + if spec := self._source_config.get("spec"): + if "type" not in spec: + spec["type"] = "Spec" + self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) + @property def resolved_manifest(self) -> Mapping[str, Any]: """ @@ -255,6 +262,9 @@ def connection_checker(self) -> ConnectionChecker: ) def streams(self, config: Mapping[str, Any]) -> List[Stream]: + if self._spec_component: + self._spec_component.validate_config(config) + self._emit_manifest_debug_message( extra_args={ "source_name": self.name, @@ -286,6 +296,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: return source_streams + def migrate_config(self, config_path: Optional[Any], config: MutableMapping[str, Any]) -> None: + self._spec_component.migrate_config(config_path, config) if self._spec_component else None + + def transform_config(self, config: MutableMapping[str, Any]) -> None: + self._spec_component.transform_config(config) if self._spec_component else None + @staticmethod def _initialize_cache_for_parent_streams( stream_configs: List[Dict[str, Any]], @@ -355,14 +371,9 @@ def spec(self, logger: logging.Logger) -> ConnectorSpecification: } ) - spec = self._source_config.get("spec") - if spec: - if "type" not in spec: - spec["type"] = "Spec" - spec_component = self._constructor.create_component(SpecModel, spec, dict()) - return spec_component.generate_spec() - else: - return super().spec(logger) + return ( + self._spec_component.generate_spec() if self._spec_component else super().spec(logger) + ) def check(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: self._configure_logger_level(logger) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 945289686..99045ad83 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ 
b/airbyte_cdk/sources/declarative/spec/spec.py @@ -69,9 +69,7 @@ def generate_spec(self) -> ConnectorSpecification: # We remap these keys to camel case because that's the existing format expected by the rest of the platform return ConnectorSpecificationSerializer.load(obj) - def migrate_config( - self, args: List[str], source: Source, config: MutableMapping[str, Any] - ) -> None: + def migrate_config(self, config_path: Optional[Any], config: Mapping[str, Any]) -> None: """ Apply all specified config transformations to the provided config and save the modified config to the given path and emit a control message. @@ -79,7 +77,6 @@ def migrate_config( :param source: Source instance :param config: The user-provided config to migrate """ - config_path = AirbyteEntrypoint(source).extract_config(args) if not config_path: return @@ -107,7 +104,7 @@ def transform_config(self, config: MutableMapping[str, Any]) -> None: for transformation in self.config_transformations: transformation.transform(config) - def validate_config(self, config: MutableMapping[str, Any]) -> None: + def validate_config(self, config: Mapping[str, Any]) -> None: """ Apply all config validations to the provided config. diff --git a/airbyte_cdk/sources/source.py b/airbyte_cdk/sources/source.py index 2958d82ca..154f7c0da 100644 --- a/airbyte_cdk/sources/source.py +++ b/airbyte_cdk/sources/source.py @@ -5,7 +5,7 @@ import logging from abc import ABC, abstractmethod -from typing import Any, Generic, Iterable, List, Mapping, Optional, TypeVar +from typing import Any, Generic, Iterable, List, Mapping, MutableMapping, Optional, TypeVar from airbyte_cdk.connector import BaseConnector, DefaultConnectorMixin, TConfig from airbyte_cdk.models import ( @@ -93,3 +93,9 @@ def read_catalog(cls, catalog_path: str) -> ConfiguredAirbyteCatalog: def name(self) -> str: """Source name""" return self.__class__.__name__ + + def migrate_config(self, config_path: str, config: MutableMapping[str, Any]) -> None: + pass + + def transform_config(self, config: MutableMapping[str, Any]) -> None: + pass From 9530e663af3d307a565aad047429b1f68d20139e Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 20 May 2025 08:39:51 -0700 Subject: [PATCH 43/56] fix merge error --- airbyte_cdk/sources/declarative/spec/spec.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 23b6b32bd..99045ad83 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -104,11 +104,7 @@ def transform_config(self, config: MutableMapping[str, Any]) -> None: for transformation in self.config_transformations: transformation.transform(config) -<<<<<<< HEAD def validate_config(self, config: Mapping[str, Any]) -> None: -======= - def validate_config(self, config: MutableMapping[str, Any]) -> None: ->>>>>>> main """ Apply all config validations to the provided config. 
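Patches 42 and 43 establish the ordering the declarative source now follows for user configs: migrations run first and are persisted (with a control message) only when the config actually changes, transformations are then applied in memory, and validations run last against the final config. The sketch below mirrors that ordering in isolation; every name in it (RenameField, require_planet_name, run_config_pipeline) is a hypothetical stand-in for the Spec component's migration, transformation, and validation hooks, not a reproduction of the airbyte_cdk API.

import json
from typing import Any, Callable, Dict, List, MutableMapping


class RenameField:
    """Hypothetical migration/transformation: move a value from one key to another."""

    def __init__(self, old_key: str, new_key: str) -> None:
        self.old_key = old_key
        self.new_key = new_key

    def transform(self, config: MutableMapping[str, Any]) -> None:
        if self.old_key in config and self.new_key not in config:
            config[self.new_key] = config.pop(self.old_key)


def require_planet_name(config: Dict[str, Any]) -> None:
    """Hypothetical validation: fail fast if a required key is missing."""
    if "planet_name" not in config:
        raise ValueError("config is missing required key 'planet_name'")


def run_config_pipeline(
    config_path: str,
    config: Dict[str, Any],
    migrations: List[RenameField],
    transformations: List[RenameField],
    validations: List[Callable[[Dict[str, Any]], None]],
) -> Dict[str, Any]:
    mutable_config = dict(config)

    # 1. Migrations: written back to disk (and, in a real source, announced via a
    #    control message) only when the migrated config differs from the original.
    for migration in migrations:
        migration.transform(mutable_config)
    if mutable_config != config:
        with open(config_path, "w") as f:
            json.dump(mutable_config, f)

    # 2. Transformations: applied to the in-memory copy only, never persisted.
    for transformation in transformations:
        transformation.transform(mutable_config)

    # 3. Validations: raise if the final config is unusable.
    for validation in validations:
        validation(mutable_config)

    return mutable_config

With an input of {"planet": "Coruscant"}, a migration renaming "planet" to "planet_name" rewrites the file once, the transformation step then operates on the in-memory copy, and require_planet_name passes; on a second run the file is left untouched because the migrated config already matches what is on disk, which is the behavior the "already migrated config" test above asserts.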
From c2fdffaca948df86941e726781437399aee329d7 Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 20 May 2025 08:40:45 -0700 Subject: [PATCH 44/56] update docstring --- airbyte_cdk/sources/declarative/spec/spec.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 99045ad83..f42e7c625 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -73,8 +73,7 @@ def migrate_config(self, config_path: Optional[Any], config: Mapping[str, Any]) """ Apply all specified config transformations to the provided config and save the modified config to the given path and emit a control message. - :param args: Command line arguments - :param source: Source instance + :param config_path: The path to the config file :param config: The user-provided config to migrate """ From 1fe042e158f0e76b4a040ecf1f64048e18d1599b Mon Sep 17 00:00:00 2001 From: pnilan Date: Tue, 20 May 2025 11:24:18 -0700 Subject: [PATCH 45/56] update config path --- airbyte_cdk/entrypoint.py | 2 +- .../sources/declarative/manifest_declarative_source.py | 4 ++-- airbyte_cdk/sources/declarative/spec/spec.py | 2 +- airbyte_cdk/sources/source.py | 8 +++++++- unit_tests/sources/declarative/spec/test_spec.py | 4 ++-- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/airbyte_cdk/entrypoint.py b/airbyte_cdk/entrypoint.py index b6885aaff..3d5db9128 100644 --- a/airbyte_cdk/entrypoint.py +++ b/airbyte_cdk/entrypoint.py @@ -187,7 +187,7 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: raw_config = self.source.read_config(parsed_args.config) config = self.source.configure(raw_config, temp_dir) mutable_config = dict(config) - config_path = self.extract_config(sys.argv[1:]) + config_path = parsed_args.config if parsed_args.config else None if config_path: self.source.migrate_config(config_path, mutable_config) self.source.transform_config(mutable_config) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index 1cd557119..51da132b6 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -8,7 +8,7 @@ from copy import deepcopy from importlib import metadata from types import ModuleType -from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set, cast +from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set import yaml from jsonschema.exceptions import ValidationError @@ -296,7 +296,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: return source_streams - def migrate_config(self, config_path: Optional[Any], config: MutableMapping[str, Any]) -> None: + def migrate_config(self, config_path: Optional[str], config: MutableMapping[str, Any]) -> None: self._spec_component.migrate_config(config_path, config) if self._spec_component else None def transform_config(self, config: MutableMapping[str, Any]) -> None: diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index f42e7c625..9b10a475e 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -69,7 +69,7 @@ def generate_spec(self) -> ConnectorSpecification: # We remap these keys to camel case because that's the existing format expected by the rest of the platform 
return ConnectorSpecificationSerializer.load(obj) - def migrate_config(self, config_path: Optional[Any], config: Mapping[str, Any]) -> None: + def migrate_config(self, config_path: Optional[str], config: Mapping[str, Any]) -> None: """ Apply all specified config transformations to the provided config and save the modified config to the given path and emit a control message. diff --git a/airbyte_cdk/sources/source.py b/airbyte_cdk/sources/source.py index 154f7c0da..f2017f9a1 100644 --- a/airbyte_cdk/sources/source.py +++ b/airbyte_cdk/sources/source.py @@ -94,8 +94,14 @@ def name(self) -> str: """Source name""" return self.__class__.__name__ - def migrate_config(self, config_path: str, config: MutableMapping[str, Any]) -> None: + def migrate_config(self, config_path: Optional[str], config: MutableMapping[str, Any]) -> None: + """ + Optional method to migrate config. + """ pass def transform_config(self, config: MutableMapping[str, Any]) -> None: + """ + Optional method to transform config. + """ pass diff --git a/unit_tests/sources/declarative/spec/test_spec.py b/unit_tests/sources/declarative/spec/test_spec.py index e25e692a9..48dc8b5c5 100644 --- a/unit_tests/sources/declarative/spec/test_spec.py +++ b/unit_tests/sources/declarative/spec/test_spec.py @@ -222,7 +222,7 @@ def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migratio ) spec.message_repository = migration_mocks["message_repository"] - spec.migrate_config(["spec"], migration_mocks["source"], input_config) + spec.migrate_config("/fake/config/path", input_config) migration_mocks["message_repository"].emit_message.assert_called_once() migration_mocks["open"].assert_called_once_with("/fake/config/path", "w") @@ -252,7 +252,7 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m ) spec.message_repository = migration_mocks["message_repository"] - spec.migrate_config(["spec"], migration_mocks["source"], input_config) + spec.migrate_config("/fake/config/path", input_config) migration_mocks["message_repository"].emit_message.assert_not_called() migration_mocks["open"].assert_not_called() From 4c19c873ae29ead61bcfc4e091b0ca74e88a48ca Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 07:11:07 -0700 Subject: [PATCH 46/56] update config migration/transformation/validation flow --- airbyte_cdk/entrypoint.py | 7 ---- .../manifest_declarative_source.py | 34 +++++++++++++++---- airbyte_cdk/sources/declarative/spec/spec.py | 20 ++--------- airbyte_cdk/sources/source.py | 12 ------- 4 files changed, 30 insertions(+), 43 deletions(-) diff --git a/airbyte_cdk/entrypoint.py b/airbyte_cdk/entrypoint.py index 3d5db9128..09fd99970 100644 --- a/airbyte_cdk/entrypoint.py +++ b/airbyte_cdk/entrypoint.py @@ -186,13 +186,6 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: else: raw_config = self.source.read_config(parsed_args.config) config = self.source.configure(raw_config, temp_dir) - mutable_config = dict(config) - config_path = parsed_args.config if parsed_args.config else None - if config_path: - self.source.migrate_config(config_path, mutable_config) - self.source.transform_config(mutable_config) - config = mutable_config - yield from [ self.airbyte_message_to_string(queued_message) for queued_message in self._emit_queued_messages(self.source) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index 51da132b6..160d9befe 100644 --- 
a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -10,11 +10,13 @@ from types import ModuleType from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set +import orjson import yaml from jsonschema.exceptions import ValidationError from jsonschema.validators import validate from packaging.version import InvalidVersion, Version +from airbyte_cdk.config_observation import create_connector_config_control_message from airbyte_cdk.connector_builder.models import ( LogMessage as ConnectorBuilderLogMessage, ) @@ -29,6 +31,7 @@ ConnectorSpecification, FailureType, ) +from airbyte_cdk.models.airbyte_protocol_serializers import AirbyteMessageSerializer from airbyte_cdk.sources.declarative.checks import COMPONENTS_CHECKER_TYPE_MAPPING from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource @@ -100,6 +103,7 @@ def __init__( component_factory: Optional[ModelToComponentFactory] = None, migrate_manifest: Optional[bool] = False, normalize_manifest: Optional[bool] = False, + config_path: Optional[str] = None, ) -> None: """ Args: @@ -109,6 +113,7 @@ def __init__( emit_connector_builder_messages: True if messages should be emitted to the connector builder. component_factory: optional factory if ModelToComponentFactory's default behavior needs to be tweaked. normalize_manifest: Optional flag to indicate if the manifest should be normalized. + config_path: Optional path to the config file. """ self.logger = logging.getLogger(f"airbyte.{self.name}") self._should_normalize = normalize_manifest @@ -131,7 +136,6 @@ def __init__( self._slice_logger: SliceLogger = ( AlwaysLogSliceLogger() if emit_connector_builder_messages else DebugSliceLogger() ) - self._config = config or {} # resolve all components in the manifest self._source_config = self._pre_process_manifest(dict(source_config)) @@ -140,11 +144,33 @@ def __init__( # apply additional post-processing to the manifest self._post_process_manifest() + self._config: Mapping[str, Any] self._spec_component: Optional[Spec] = None if spec := self._source_config.get("spec"): if "type" not in spec: spec["type"] = "Spec" self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) + mutable_config = dict(config) if config else {} + + if config_path: + self._spec_component.migrate_config(mutable_config) + try: + if mutable_config != config: + with open(config_path, "w") as f: + json.dump(mutable_config, f) + self.message_repository.emit_message( + create_connector_config_control_message(mutable_config) + ) + # We have no mechanism for consuming the queue, so we print the messages to stdout + for message in self.message_repository.consume_queue(): + print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) + except Exception as e: + self.logger.error(f"Error migrating config: {str(e)}") + mutable_config = dict(config) if config else {} + self._spec_component.transform_config(mutable_config) + self._config = mutable_config + else: + self._config = config or {} @property def resolved_manifest(self) -> Mapping[str, Any]: @@ -296,12 +322,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: return source_streams - def migrate_config(self, config_path: Optional[str], config: MutableMapping[str, Any]) -> None: - self._spec_component.migrate_config(config_path, config) if self._spec_component else None - - def 
transform_config(self, config: MutableMapping[str, Any]) -> None: - self._spec_component.transform_config(config) if self._spec_component else None - @staticmethod def _initialize_cache_for_parent_streams( stream_configs: List[Dict[str, Any]], diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 9b10a475e..f68ef5979 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -69,30 +69,16 @@ def generate_spec(self) -> ConnectorSpecification: # We remap these keys to camel case because that's the existing format expected by the rest of the platform return ConnectorSpecificationSerializer.load(obj) - def migrate_config(self, config_path: Optional[str], config: Mapping[str, Any]) -> None: + def migrate_config(self, config: MutableMapping[str, Any]) -> None: """ - Apply all specified config transformations to the provided config and save the modified config to the given path and emit a control message. + Apply all specified config transformations to the provided config and emit a control message. - :param config_path: The path to the config file :param config: The user-provided config to migrate """ - if not config_path: - return - - mutable_config = dict(config) for migration in self.config_migrations: for transformation in migration.transformations: - transformation.transform(mutable_config) - - if mutable_config != config: - with open(config_path, "w") as f: - json.dump(mutable_config, f) - self.message_repository.emit_message( - create_connector_config_control_message(mutable_config) - ) - for message in self.message_repository.consume_queue(): - print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) + transformation.transform(config) def transform_config(self, config: MutableMapping[str, Any]) -> None: """ diff --git a/airbyte_cdk/sources/source.py b/airbyte_cdk/sources/source.py index f2017f9a1..cc1e24cc7 100644 --- a/airbyte_cdk/sources/source.py +++ b/airbyte_cdk/sources/source.py @@ -93,15 +93,3 @@ def read_catalog(cls, catalog_path: str) -> ConfiguredAirbyteCatalog: def name(self) -> str: """Source name""" return self.__class__.__name__ - - def migrate_config(self, config_path: Optional[str], config: MutableMapping[str, Any]) -> None: - """ - Optional method to migrate config. - """ - pass - - def transform_config(self, config: MutableMapping[str, Any]) -> None: - """ - Optional method to transform config. - """ - pass From c7f090e5864141cfcff87fc49a10440d21e10dc5 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 07:12:38 -0700 Subject: [PATCH 47/56] remove unused imports from spec.py --- airbyte_cdk/sources/declarative/spec/spec.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index f68ef5979..341fc909e 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -1,28 +1,21 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# -import json from dataclasses import InitVar, dataclass, field from typing import Any, List, Mapping, MutableMapping, Optional -import orjson - -from airbyte_cdk.config_observation import create_connector_config_control_message -from airbyte_cdk.entrypoint import AirbyteEntrypoint from airbyte_cdk.models import ( AdvancedAuth, ConnectorSpecification, ConnectorSpecificationSerializer, ) -from airbyte_cdk.models.airbyte_protocol_serializers import AirbyteMessageSerializer from airbyte_cdk.sources.declarative.models.declarative_component_schema import AuthFlow from airbyte_cdk.sources.declarative.transformations.config_transformations.config_transformation import ( ConfigTransformation, ) from airbyte_cdk.sources.declarative.validators.validator import Validator from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository -from airbyte_cdk.sources.source import Source @dataclass From bdcacbb3dc876a219e16242fa4ed427013cd79ee Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 09:50:25 -0700 Subject: [PATCH 48/56] fix migration tests --- .../manifest_declarative_source.py | 4 +- airbyte_cdk/sources/declarative/spec/spec.py | 2 - .../sources/declarative/spec/test_spec.py | 104 ---------- .../test_manifest_declarative_source.py | 182 +++++++++++++++++- 4 files changed, 184 insertions(+), 108 deletions(-) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index 160d9befe..a701943cf 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -62,6 +62,7 @@ from airbyte_cdk.sources.declarative.resolvers import COMPONENTS_RESOLVER_TYPE_MAPPING from airbyte_cdk.sources.declarative.spec.spec import Spec from airbyte_cdk.sources.message import MessageRepository +from airbyte_cdk.sources.message.repository import InMemoryMessageRepository from airbyte_cdk.sources.streams.core import Stream from airbyte_cdk.sources.types import ConnectionDefinition from airbyte_cdk.sources.utils.slice_logger import ( @@ -146,7 +147,8 @@ def __init__( self._config: Mapping[str, Any] self._spec_component: Optional[Spec] = None - if spec := self._source_config.get("spec"): + spec = self._source_config.get("spec") + if spec: if "type" not in spec: spec["type"] = "Spec" self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) diff --git a/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte_cdk/sources/declarative/spec/spec.py index 341fc909e..20fb3c5c9 100644 --- a/airbyte_cdk/sources/declarative/spec/spec.py +++ b/airbyte_cdk/sources/declarative/spec/spec.py @@ -41,7 +41,6 @@ class Spec: config_migrations: List[ConfigMigration] = field(default_factory=list) config_transformations: List[ConfigTransformation] = field(default_factory=list) config_validations: List[Validator] = field(default_factory=list) - message_repository: MessageRepository = InMemoryMessageRepository() def generate_spec(self) -> ConnectorSpecification: """ @@ -68,7 +67,6 @@ def migrate_config(self, config: MutableMapping[str, Any]) -> None: :param config: The user-provided config to migrate """ - for migration in self.config_migrations: for transformation in migration.transformations: transformation.transform(config) diff --git a/unit_tests/sources/declarative/spec/test_spec.py b/unit_tests/sources/declarative/spec/test_spec.py index 48dc8b5c5..75287082c 100644 --- 
a/unit_tests/sources/declarative/spec/test_spec.py +++ b/unit_tests/sources/declarative/spec/test_spec.py @@ -2,8 +2,6 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. # -from unittest.mock import Mock, mock_open - import pytest from airbyte_cdk.models import ( @@ -161,108 +159,6 @@ def test_spec(spec, expected_connection_specification) -> None: assert spec.generate_spec() == expected_connection_specification -@pytest.fixture -def migration_mocks(monkeypatch): - mock_message_repository = Mock() - mock_message_repository.consume_queue.return_value = [Mock()] - - mock_source = Mock() - mock_entrypoint = Mock() - mock_entrypoint.extract_config.return_value = "/fake/config/path" - monkeypatch.setattr( - "airbyte_cdk.sources.declarative.spec.spec.AirbyteEntrypoint", lambda _: mock_entrypoint - ) - - _mock_open = mock_open() - mock_json_dump = Mock() - mock_print = Mock() - mock_serializer_dump = Mock() - - mock_decoded_bytes = Mock() - mock_decoded_bytes.decode.return_value = "decoded_message" - mock_orjson_dumps = Mock(return_value=mock_decoded_bytes) - - monkeypatch.setattr("builtins.open", _mock_open) - monkeypatch.setattr("json.dump", mock_json_dump) - monkeypatch.setattr("builtins.print", mock_print) - monkeypatch.setattr( - "airbyte_cdk.models.airbyte_protocol_serializers.AirbyteMessageSerializer.dump", - mock_serializer_dump, - ) - monkeypatch.setattr("airbyte_cdk.sources.declarative.spec.spec.orjson.dumps", mock_orjson_dumps) - - return { - "message_repository": mock_message_repository, - "source": mock_source, - "open": _mock_open, - "json_dump": mock_json_dump, - "print": mock_print, - "serializer_dump": mock_serializer_dump, - "orjson_dumps": mock_orjson_dumps, - "decoded_bytes": mock_decoded_bytes, - } - - -def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migration_mocks) -> None: - input_config = {"planet": "CRSC"} - - spec = component_spec( - connection_specification={}, - parameters={}, - config_migrations=[ - ConfigMigration( - description="Test migration", - transformations=[ - ConfigRemapField( - map={"CRSC": "Coruscant"}, field_path=["planet"], config=input_config - ) - ], - ) - ], - ) - spec.message_repository = migration_mocks["message_repository"] - - spec.migrate_config("/fake/config/path", input_config) - - migration_mocks["message_repository"].emit_message.assert_called_once() - migration_mocks["open"].assert_called_once_with("/fake/config/path", "w") - migration_mocks["json_dump"].assert_called_once() - migration_mocks["print"].assert_called() - migration_mocks["serializer_dump"].assert_called() - migration_mocks["orjson_dumps"].assert_called() - migration_mocks["decoded_bytes"].decode.assert_called() - - -def test_given_already_migrated_config_no_control_message_is_emitted(migration_mocks) -> None: - input_config = {"planet": "Coruscant"} - - spec = component_spec( - connection_specification={}, - parameters={}, - config_migrations=[ - ConfigMigration( - description="Test migration", - transformations=[ - ConfigRemapField( - map={"CRSC": "Coruscant"}, field_path=["planet"], config=input_config - ) - ], - ) - ], - ) - spec.message_repository = migration_mocks["message_repository"] - - spec.migrate_config("/fake/config/path", input_config) - - migration_mocks["message_repository"].emit_message.assert_not_called() - migration_mocks["open"].assert_not_called() - migration_mocks["json_dump"].assert_not_called() - migration_mocks["print"].assert_not_called() - migration_mocks["serializer_dump"].assert_not_called() - 
migration_mocks["orjson_dumps"].assert_not_called() - migration_mocks["decoded_bytes"].decode.assert_not_called() - - def test_given_list_of_transformations_when_transform_config_then_config_is_transformed() -> None: input_config = {"planet_code": "CRSC"} expected_config = { diff --git a/unit_tests/sources/declarative/test_manifest_declarative_source.py b/unit_tests/sources/declarative/test_manifest_declarative_source.py index d6de73db8..91f585433 100644 --- a/unit_tests/sources/declarative/test_manifest_declarative_source.py +++ b/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -9,7 +9,7 @@ from copy import deepcopy from pathlib import Path from typing import Any, List, Mapping -from unittest.mock import call, patch +from unittest.mock import Mock, call, mock_open, patch import pytest import requests @@ -30,6 +30,9 @@ ) from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource +from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import ( + ModelToComponentFactory, +) from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever logger = logging.getLogger("airbyte") @@ -2099,3 +2102,180 @@ def test_slice_checkpoint(test_name, manifest, pages, expected_states_qty): with patch.object(SimpleRetriever, "_fetch_next_page", side_effect=pages): states = [message.state for message in _run_read(manifest, _stream_name) if message.state] assert len(states) == expected_states_qty + + +@pytest.fixture +def migration_mocks(monkeypatch): + mock_message_repository = Mock() + mock_message_repository.consume_queue.return_value = [Mock()] + + _mock_open = mock_open() + mock_json_dump = Mock() + mock_print = Mock() + mock_serializer_dump = Mock() + + mock_decoded_bytes = Mock() + mock_decoded_bytes.decode.return_value = "decoded_message" + mock_orjson_dumps = Mock(return_value=mock_decoded_bytes) + + monkeypatch.setattr("builtins.open", _mock_open) + monkeypatch.setattr("json.dump", mock_json_dump) + monkeypatch.setattr("builtins.print", mock_print) + monkeypatch.setattr( + "airbyte_cdk.models.airbyte_protocol_serializers.AirbyteMessageSerializer.dump", + mock_serializer_dump, + ) + monkeypatch.setattr( + "airbyte_cdk.sources.declarative.manifest_declarative_source.orjson.dumps", + mock_orjson_dumps, + ) + + return { + "message_repository": mock_message_repository, + "open": _mock_open, + "json_dump": mock_json_dump, + "print": mock_print, + "serializer_dump": mock_serializer_dump, + "orjson_dumps": mock_orjson_dumps, + "decoded_bytes": mock_decoded_bytes, + } + + +def test_given_unmigrated_config_when_migrating_then_config_is_migrated(migration_mocks) -> None: + input_config = {"planet": "CRSC"} + + manifest = { + "version": "0.34.2", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["Test"]}, + "streams": [ + { + "type": "DeclarativeStream", + "name": "Test", + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://example.org", + "path": "/test", + "authenticator": {"type": "NoAuth"}, + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": []}, + }, + }, + } + ], + "spec": { + "type": "Spec", + "documentation_url": "https://example.org", + "connection_specification": {}, + 
"config_normalization_rules": { + "config_migrations": [ + { + "type": "ConfigMigration", + "description": "Test migration", + "transformations": [ + { + "type": "ConfigRemapField", + "map": {"CRSC": "Coruscant"}, + "field_path": ["planet"], + } + ], + } + ], + }, + }, + } + + ManifestDeclarativeSource( + source_config=manifest, + config=input_config, + config_path="/fake/config/path", + component_factory=ModelToComponentFactory( + message_repository=migration_mocks["message_repository"], + ), + ) + + migration_mocks["message_repository"].emit_message.assert_called_once() + migration_mocks["open"].assert_called_once_with("/fake/config/path", "w") + migration_mocks["json_dump"].assert_called_once() + migration_mocks["print"].assert_called() + migration_mocks["serializer_dump"].assert_called() + migration_mocks["orjson_dumps"].assert_called() + migration_mocks["decoded_bytes"].decode.assert_called() + + +def test_given_already_migrated_config_no_control_message_is_emitted(migration_mocks) -> None: + input_config = {"planet": "Coruscant"} + + manifest = { + "version": "0.34.2", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["Test"]}, + "streams": [ + { + "type": "DeclarativeStream", + "name": "Test", + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://example.org", + "path": "/test", + "authenticator": {"type": "NoAuth"}, + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": []}, + }, + }, + } + ], + "spec": { + "type": "Spec", + "documentation_url": "https://example.org", + "connection_specification": {}, + "config_normalization_rules": { + "config_migrations": [ + { + "type": "ConfigMigration", + "description": "Test migration", + "transformations": [ + { + "type": "ConfigRemapField", + "map": {"CRSC": "Coruscant"}, + "field_path": ["planet"], + } + ], + } + ], + }, + }, + } + + ManifestDeclarativeSource( + source_config=manifest, + config=input_config, + config_path="/fake/config/path", + component_factory=ModelToComponentFactory( + message_repository=migration_mocks["message_repository"], + ), + ) + + migration_mocks["message_repository"].emit_message.assert_not_called() + migration_mocks["open"].assert_not_called() + migration_mocks["json_dump"].assert_not_called() + migration_mocks["print"].assert_not_called() + migration_mocks["serializer_dump"].assert_not_called() + migration_mocks["orjson_dumps"].assert_not_called() + migration_mocks["decoded_bytes"].decode.assert_not_called() From b945a3c14abdf2405a0547bc80a340b81cee8857 Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 10:17:25 -0700 Subject: [PATCH 49/56] add manifest declarative source validation test --- .../test_manifest_declarative_source.py | 128 ++++++++++++++++++ 1 file changed, 128 insertions(+) diff --git a/unit_tests/sources/declarative/test_manifest_declarative_source.py b/unit_tests/sources/declarative/test_manifest_declarative_source.py index 91f585433..8a7d46a28 100644 --- a/unit_tests/sources/declarative/test_manifest_declarative_source.py +++ b/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -2279,3 +2279,131 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m migration_mocks["serializer_dump"].assert_not_called() migration_mocks["orjson_dumps"].assert_not_called() 
migration_mocks["decoded_bytes"].decode.assert_not_called() + + +def test_given_valid_config_streams_validates_config_and_does_not_raise(): + input_config = {"schema_to_validate": {"planet": "Coruscant"}} + + manifest = { + "version": "0.34.2", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["Test"]}, + "streams": [ + { + "type": "DeclarativeStream", + "name": "Test", + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://example.org", + "path": "/test", + "authenticator": {"type": "NoAuth"}, + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": []}, + }, + }, + } + ], + "spec": { + "type": "Spec", + "documentation_url": "https://example.org", + "connection_specification": {}, + "parameters": {}, + "config_normalization_rules": { + "validations": [ + { + "type": "DpathValidator", + "field_path": ["schema_to_validate"], + "validation_strategy": { + "type": "ValidateAdheresToSchema", + "base_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Test Spec", + "type": "object", + "properties": {"planet": {"type": "string"}}, + "required": ["planet"], + "additionalProperties": False, + }, + }, + } + ], + }, + }, + } + + source = ManifestDeclarativeSource( + source_config=manifest, + ) + + source.streams(input_config) + + +def test_given_invalid_config_streams_validates_config_and_raises(): + input_config = {"schema_to_validate": {"will_fail": "Coruscant"}} + + manifest = { + "version": "0.34.2", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["Test"]}, + "streams": [ + { + "type": "DeclarativeStream", + "name": "Test", + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://example.org", + "path": "/test", + "authenticator": {"type": "NoAuth"}, + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": []}, + }, + }, + } + ], + "spec": { + "type": "Spec", + "documentation_url": "https://example.org", + "connection_specification": {}, + "parameters": {}, + "config_normalization_rules": { + "validations": [ + { + "type": "DpathValidator", + "field_path": ["schema_to_validate"], + "validation_strategy": { + "type": "ValidateAdheresToSchema", + "base_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Test Spec", + "type": "object", + "properties": {"planet": {"type": "string"}}, + "required": ["planet"], + "additionalProperties": False, + }, + }, + } + ], + }, + }, + } + source = ManifestDeclarativeSource( + source_config=manifest, + ) + + with pytest.raises(ValueError): + source.streams(input_config) From 7646d2224952c37d4139e32204fa3f6d500c4aaf Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 10:19:51 -0700 Subject: [PATCH 50/56] add transformation test --- .../test_manifest_declarative_source.py | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/unit_tests/sources/declarative/test_manifest_declarative_source.py b/unit_tests/sources/declarative/test_manifest_declarative_source.py index 8a7d46a28..f48d8fd83 100644 --- a/unit_tests/sources/declarative/test_manifest_declarative_source.py +++ 
b/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -2281,6 +2281,60 @@ def test_given_already_migrated_config_no_control_message_is_emitted(migration_m migration_mocks["decoded_bytes"].decode.assert_not_called() +def test_given_transformations_config_is_transformed(): + input_config = {"planet": "Coruscant"} + + manifest = { + "version": "0.34.2", + "type": "DeclarativeSource", + "check": {"type": "CheckStream", "stream_names": ["Test"]}, + "streams": [ + { + "type": "DeclarativeStream", + "name": "Test", + "schema_loader": { + "type": "InlineSchemaLoader", + "schema": {"type": "object"}, + }, + "retriever": { + "type": "SimpleRetriever", + "requester": { + "type": "HttpRequester", + "url_base": "https://example.org", + "path": "/test", + "authenticator": {"type": "NoAuth"}, + }, + "record_selector": { + "type": "RecordSelector", + "extractor": {"type": "DpathExtractor", "field_path": []}, + }, + }, + } + ], + "spec": { + "type": "Spec", + "documentation_url": "https://example.org", + "connection_specification": {}, + "config_normalization_rules": { + "transformations": [ + { + "type": "ConfigRemapField", + "map": {"CRSC": "Coruscant"}, + "field_path": ["planet"], + } + ], + }, + }, + } + + source = ManifestDeclarativeSource( + source_config=manifest, + config=input_config, + ) + + assert source._config == {"planet": "Coruscant"} + + def test_given_valid_config_streams_validates_config_and_does_not_raise(): input_config = {"schema_to_validate": {"planet": "Coruscant"}} From 0b02e767d1aa3232e2bf08cb6f36bc1294c1308e Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 10:25:12 -0700 Subject: [PATCH 51/56] fix errant imports --- airbyte_cdk/entrypoint.py | 2 +- .../sources/declarative/manifest_declarative_source.py | 5 ++--- airbyte_cdk/sources/source.py | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/airbyte_cdk/entrypoint.py b/airbyte_cdk/entrypoint.py index 09fd99970..76a1be32e 100644 --- a/airbyte_cdk/entrypoint.py +++ b/airbyte_cdk/entrypoint.py @@ -166,7 +166,6 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: self.logger.setLevel(logging.INFO) source_spec: ConnectorSpecification = self.source.spec(self.logger) - try: with tempfile.TemporaryDirectory( # Cleanup can fail on Windows due to file locks. Ignore if so, @@ -186,6 +185,7 @@ def run(self, parsed_args: argparse.Namespace) -> Iterable[str]: else: raw_config = self.source.read_config(parsed_args.config) config = self.source.configure(raw_config, temp_dir) + yield from [ self.airbyte_message_to_string(queued_message) for queued_message in self._emit_queued_messages(self.source) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index a701943cf..cd588a892 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2025 Airbyte, Inc., all rights reserved. 
# import json @@ -8,7 +8,7 @@ from copy import deepcopy from importlib import metadata from types import ModuleType -from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set +from typing import Any, Dict, Iterator, List, Mapping, Optional, Set import orjson import yaml @@ -62,7 +62,6 @@ from airbyte_cdk.sources.declarative.resolvers import COMPONENTS_RESOLVER_TYPE_MAPPING from airbyte_cdk.sources.declarative.spec.spec import Spec from airbyte_cdk.sources.message import MessageRepository -from airbyte_cdk.sources.message.repository import InMemoryMessageRepository from airbyte_cdk.sources.streams.core import Stream from airbyte_cdk.sources.types import ConnectionDefinition from airbyte_cdk.sources.utils.slice_logger import ( diff --git a/airbyte_cdk/sources/source.py b/airbyte_cdk/sources/source.py index cc1e24cc7..2958d82ca 100644 --- a/airbyte_cdk/sources/source.py +++ b/airbyte_cdk/sources/source.py @@ -5,7 +5,7 @@ import logging from abc import ABC, abstractmethod -from typing import Any, Generic, Iterable, List, Mapping, MutableMapping, Optional, TypeVar +from typing import Any, Generic, Iterable, List, Mapping, Optional, TypeVar from airbyte_cdk.connector import BaseConnector, DefaultConnectorMixin, TConfig from airbyte_cdk.models import ( From a07aa39cd0712e0fc79627c302a838c49ef9e32e Mon Sep 17 00:00:00 2001 From: pnilan Date: Wed, 21 May 2025 15:58:08 -0700 Subject: [PATCH 52/56] add config_path param --- airbyte_cdk/cli/source_declarative_manifest/_run.py | 2 ++ .../sources/declarative/concurrent_declarative_source.py | 2 ++ airbyte_cdk/sources/declarative/yaml_declarative_source.py | 2 ++ 3 files changed, 6 insertions(+) diff --git a/airbyte_cdk/cli/source_declarative_manifest/_run.py b/airbyte_cdk/cli/source_declarative_manifest/_run.py index df36b3df1..b2c24fe5c 100644 --- a/airbyte_cdk/cli/source_declarative_manifest/_run.py +++ b/airbyte_cdk/cli/source_declarative_manifest/_run.py @@ -58,6 +58,7 @@ def __init__( catalog: ConfiguredAirbyteCatalog | None, config: MutableMapping[str, Any] | None, state: TState, + config_path: str | None = None, **kwargs: Any, ) -> None: """ @@ -76,6 +77,7 @@ def __init__( config=config, state=state, # type: ignore [arg-type] path_to_yaml="manifest.yaml", + config_path=config_path, ) diff --git a/airbyte_cdk/sources/declarative/concurrent_declarative_source.py b/airbyte_cdk/sources/declarative/concurrent_declarative_source.py index 572e487dc..ba08f8b94 100644 --- a/airbyte_cdk/sources/declarative/concurrent_declarative_source.py +++ b/airbyte_cdk/sources/declarative/concurrent_declarative_source.py @@ -74,6 +74,7 @@ def __init__( debug: bool = False, emit_connector_builder_messages: bool = False, component_factory: Optional[ModelToComponentFactory] = None, + config_path: Optional[str] = None, **kwargs: Any, ) -> None: # todo: We could remove state from initialization. 
Now that streams are grouped during the read(), a source @@ -96,6 +97,7 @@ def __init__( debug=debug, emit_connector_builder_messages=emit_connector_builder_messages, component_factory=component_factory, + config_path=config_path, ) concurrency_level_from_manifest = self._source_config.get("concurrency_level") diff --git a/airbyte_cdk/sources/declarative/yaml_declarative_source.py b/airbyte_cdk/sources/declarative/yaml_declarative_source.py index 93bdc55e9..003578738 100644 --- a/airbyte_cdk/sources/declarative/yaml_declarative_source.py +++ b/airbyte_cdk/sources/declarative/yaml_declarative_source.py @@ -24,6 +24,7 @@ def __init__( catalog: Optional[ConfiguredAirbyteCatalog] = None, config: Optional[Mapping[str, Any]] = None, state: Optional[List[AirbyteStateMessage]] = None, + config_path: Optional[str] = None, ) -> None: """ :param path_to_yaml: Path to the yaml file describing the source @@ -36,6 +37,7 @@ def __init__( config=config or {}, state=state or [], source_config=source_config, + config_path=config_path, ) def _read_and_parse_yaml_file(self, path_to_yaml_file: str) -> ConnectionDefinition: From f8199e19d90c4f7580debcc40f373214da4fcb22 Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 22 May 2025 06:46:39 -0700 Subject: [PATCH 53/56] update per comments --- .../manifest_declarative_source.py | 55 +++++++++---------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index cd588a892..427d840a7 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -8,7 +8,7 @@ from copy import deepcopy from importlib import metadata from types import ModuleType -from typing import Any, Dict, Iterator, List, Mapping, Optional, Set +from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Set import orjson import yaml @@ -145,33 +145,13 @@ def __init__( self._post_process_manifest() self._config: Mapping[str, Any] - self._spec_component: Optional[Spec] = None - spec = self._source_config.get("spec") - if spec: - if "type" not in spec: - spec["type"] = "Spec" - self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) - mutable_config = dict(config) if config else {} - - if config_path: - self._spec_component.migrate_config(mutable_config) - try: - if mutable_config != config: - with open(config_path, "w") as f: - json.dump(mutable_config, f) - self.message_repository.emit_message( - create_connector_config_control_message(mutable_config) - ) - # We have no mechanism for consuming the queue, so we print the messages to stdout - for message in self.message_repository.consume_queue(): - print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) - except Exception as e: - self.logger.error(f"Error migrating config: {str(e)}") - mutable_config = dict(config) if config else {} - self._spec_component.transform_config(mutable_config) - self._config = mutable_config - else: - self._config = config or {} + self._spec_component: Spec + spec: Mapping[str, Any] = self._source_config["spec"] + self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) + mutable_config = dict(config) if config else {} + self._migrate_config(config_path, mutable_config, config) + self._spec_component.transform_config(mutable_config) + self._config = mutable_config @property def resolved_manifest(self) -> Mapping[str, 
Any]: @@ -233,6 +213,25 @@ def _normalize_manifest(self) -> None: normalizer = ManifestNormalizer(self._source_config, self._declarative_component_schema) self._source_config = normalizer.normalize() + def _migrate_config( + self, + config_path: Optional[str], + mutable_config: MutableMapping[str, Any], + config: Optional[Mapping[str, Any]], + ) -> None: + if config_path and config: + self._spec_component.migrate_config(mutable_config) + if mutable_config != config: + if config_path: + with open(config_path, "w") as f: + json.dump(mutable_config, f) + self.message_repository.emit_message( + create_connector_config_control_message(mutable_config) + ) + # We have no mechanism for consuming the queue, so we print the messages to stdout + for message in self.message_repository.consume_queue(): + print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) + def _migrate_manifest(self) -> None: """ This method is used to migrate the manifest. It should be called after the manifest has been validated. From 4e2e8fc572d61582b551eb088c33cc339f9b77dd Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 22 May 2025 07:35:36 -0700 Subject: [PATCH 54/56] revert to include spec/spec_component check --- .../declarative/manifest_declarative_source.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index 427d840a7..408479295 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -145,12 +145,15 @@ def __init__( self._post_process_manifest() self._config: Mapping[str, Any] - self._spec_component: Spec - spec: Mapping[str, Any] = self._source_config["spec"] - self._spec_component = self._constructor.create_component(SpecModel, spec, dict()) + self._spec_component: Optional[Spec] + spec: Optional[Mapping[str, Any]] = self._source_config.get("spec") + self._spec_component = ( + self._constructor.create_component(SpecModel, spec, dict()) if spec else None + ) mutable_config = dict(config) if config else {} self._migrate_config(config_path, mutable_config, config) - self._spec_component.transform_config(mutable_config) + if self._spec_component: + self._spec_component.transform_config(mutable_config) self._config = mutable_config @property @@ -219,7 +222,7 @@ def _migrate_config( mutable_config: MutableMapping[str, Any], config: Optional[Mapping[str, Any]], ) -> None: - if config_path and config: + if config_path and config and self._spec_component: self._spec_component.migrate_config(mutable_config) if mutable_config != config: if config_path: From 5ca4561014d0551ca5d142bd10975e0944a9b45d Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 22 May 2025 12:14:16 -0700 Subject: [PATCH 55/56] encapsulate migrate and transform and return Config --- .../manifest_declarative_source.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index 408479295..f6efcc59f 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -63,7 +63,7 @@ from airbyte_cdk.sources.declarative.spec.spec import Spec from airbyte_cdk.sources.message import MessageRepository from airbyte_cdk.sources.streams.core import Stream -from 
airbyte_cdk.sources.types import ConnectionDefinition +from airbyte_cdk.sources.types import Config, ConnectionDefinition from airbyte_cdk.sources.utils.slice_logger import ( AlwaysLogSliceLogger, DebugSliceLogger, @@ -150,11 +150,7 @@ def __init__( self._spec_component = ( self._constructor.create_component(SpecModel, spec, dict()) if spec else None ) - mutable_config = dict(config) if config else {} - self._migrate_config(config_path, mutable_config, config) - if self._spec_component: - self._spec_component.transform_config(mutable_config) - self._config = mutable_config + self._config = self._migrate_and_transform_config(config_path, config) @property def resolved_manifest(self) -> Mapping[str, Any]: @@ -216,13 +212,13 @@ def _normalize_manifest(self) -> None: normalizer = ManifestNormalizer(self._source_config, self._declarative_component_schema) self._source_config = normalizer.normalize() - def _migrate_config( + def _migrate_and_transform_config( self, config_path: Optional[str], - mutable_config: MutableMapping[str, Any], - config: Optional[Mapping[str, Any]], - ) -> None: - if config_path and config and self._spec_component: + config: Optional[Config], + ) -> Config: + mutable_config = dict(config) if config else {} + if self._spec_component: self._spec_component.migrate_config(mutable_config) if mutable_config != config: if config_path: @@ -235,6 +231,10 @@ def _migrate_config( for message in self.message_repository.consume_queue(): print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) + self._spec_component.transform_config(mutable_config) + + return mutable_config + def _migrate_manifest(self) -> None: """ This method is used to migrate the manifest. It should be called after the manifest has been validated. From 273a88652ef473e54ff6f169b0edbe0ae33e6c0e Mon Sep 17 00:00:00 2001 From: pnilan Date: Thu, 22 May 2025 13:31:24 -0700 Subject: [PATCH 56/56] update typing --- .../manifest_declarative_source.py | 41 +++++++++---------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte_cdk/sources/declarative/manifest_declarative_source.py index f6efcc59f..70c417054 100644 --- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py +++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ -144,13 +144,11 @@ def __init__( # apply additional post-processing to the manifest self._post_process_manifest() - self._config: Mapping[str, Any] - self._spec_component: Optional[Spec] spec: Optional[Mapping[str, Any]] = self._source_config.get("spec") - self._spec_component = ( + self._spec_component: Optional[Spec] = ( self._constructor.create_component(SpecModel, spec, dict()) if spec else None ) - self._config = self._migrate_and_transform_config(config_path, config) + self._config = self._migrate_and_transform_config(config_path, config) or {} @property def resolved_manifest(self) -> Mapping[str, Any]: @@ -216,23 +214,24 @@ def _migrate_and_transform_config( self, config_path: Optional[str], config: Optional[Config], - ) -> Config: - mutable_config = dict(config) if config else {} - if self._spec_component: - self._spec_component.migrate_config(mutable_config) - if mutable_config != config: - if config_path: - with open(config_path, "w") as f: - json.dump(mutable_config, f) - self.message_repository.emit_message( - create_connector_config_control_message(mutable_config) - ) - # We have no mechanism for consuming the queue, so we print the messages to stdout - for message in 
self.message_repository.consume_queue(): - print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) - - self._spec_component.transform_config(mutable_config) - + ) -> Optional[Config]: + if not config: + return None + if not self._spec_component: + return config + mutable_config = dict(config) + self._spec_component.migrate_config(mutable_config) + if mutable_config != config: + if config_path: + with open(config_path, "w") as f: + json.dump(mutable_config, f) + self.message_repository.emit_message( + create_connector_config_control_message(mutable_config) + ) + # We have no mechanism for consuming the queue, so we print the messages to stdout + for message in self.message_repository.consume_queue(): + print(orjson.dumps(AirbyteMessageSerializer.dump(message)).decode()) + self._spec_component.transform_config(mutable_config) return mutable_config def _migrate_manifest(self) -> None:
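
Taken together, these commits move config migration, transformation, and validation out of the entrypoint and into ManifestDeclarativeSource.__init__ via the new config_path argument. The following is a minimal sketch of the resulting behavior, not part of the patch series itself: it assumes the spec-level config_normalization_rules block accepted on this branch (mirroring the unit-test fixtures above), and the config path and field values are illustrative only.

from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource

# Illustrative manifest: the spec's config_normalization_rules drive migration
# of the user-provided config, mirroring the fixtures in the tests above.
manifest = {
    "version": "0.34.2",
    "type": "DeclarativeSource",
    "check": {"type": "CheckStream", "stream_names": ["Test"]},
    "streams": [
        {
            "type": "DeclarativeStream",
            "name": "Test",
            "schema_loader": {"type": "InlineSchemaLoader", "schema": {"type": "object"}},
            "retriever": {
                "type": "SimpleRetriever",
                "requester": {
                    "type": "HttpRequester",
                    "url_base": "https://example.org",
                    "path": "/test",
                    "authenticator": {"type": "NoAuth"},
                },
                "record_selector": {
                    "type": "RecordSelector",
                    "extractor": {"type": "DpathExtractor", "field_path": []},
                },
            },
        }
    ],
    "spec": {
        "type": "Spec",
        "documentation_url": "https://example.org",
        "connection_specification": {},
        "config_normalization_rules": {
            "config_migrations": [
                {
                    "type": "ConfigMigration",
                    "description": "Test migration",
                    "transformations": [
                        {
                            "type": "ConfigRemapField",
                            "map": {"CRSC": "Coruscant"},
                            "field_path": ["planet"],
                        }
                    ],
                }
            ],
        },
    },
}

# Passing config_path opts into the new flow: migrations run in __init__, the
# migrated config is written back to config_path, a connector config control
# message is emitted (and printed, since nothing consumes the queue), and any
# spec-level transformations are applied before the config is stored on the source.
source = ManifestDeclarativeSource(
    source_config=manifest,
    config={"planet": "CRSC"},
    config_path="/tmp/config.json",  # illustrative path, not a real connector config location
)

# Mirrors the assertion style of the transformation test above (_config is internal state).
assert source._config == {"planet": "Coruscant"}

# Validations declared under config_normalization_rules (e.g. a DpathValidator with
# ValidateAdheresToSchema) are exercised when streams() is called and raise ValueError
# on failure, per the tests above.
streams = source.streams(source._config)

Because the source now owns this flow, the migrate_config/transform_config hooks removed from entrypoint.py and the abstract Source in the commits above are no longer needed, and callers only have to thread config_path through the YamlDeclarativeSource / ConcurrentDeclarativeSource constructors.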