Skip to content

Consolidate env reading to single config object. #600

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 52 commits into from
Jun 11, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
52 commits
Select commit Hold shift + click to select a range
283e118
Consolidate env reading to single config object.
purple4reina May 15, 2025
05398fc
Tests for reading config values.
purple4reina May 15, 2025
dfc8581
Lazy read env values on config.
purple4reina May 15, 2025
40b666e
Tests correctly reset config object.
purple4reina May 15, 2025
fb20e39
Black.
purple4reina May 15, 2025
55a47f5
More black.
purple4reina May 15, 2025
5da0267
Use config.
purple4reina May 15, 2025
9dd96f1
Correct config import.
purple4reina Jun 3, 2025
d48a546
???????????
purple4reina Jun 4, 2025
f0ad15d
Fix unittests.
purple4reina Jun 4, 2025
b89acab
Consolidate reset.
purple4reina Jun 4, 2025
ab67bd4
Flush to logs from config.
purple4reina Jun 4, 2025
91887a9
Add logs_injection.
purple4reina Jun 4, 2025
c4c3d8d
Add merge_xray_traces.
purple4reina Jun 4, 2025
e2426f6
Use functionname.
purple4reina Jun 4, 2025
2f640ef
Add service.
purple4reina Jun 4, 2025
192b9c1
Add trace_extractor.
purple4reina Jun 4, 2025
ddd25c6
Add capture_payload_max_depth.
purple4reina Jun 4, 2025
09adc0f
UNblack.
purple4reina Jun 4, 2025
5c81289
Add profiling_enabled.
purple4reina Jun 4, 2025
63b3d4f
Add llmobs_enabled.
purple4reina Jun 4, 2025
62378bc
Add exception_replay_enabled.
purple4reina Jun 4, 2025
76f5d2d
Add env.
purple4reina Jun 4, 2025
47c808a
Add capture_payload_enabled.
purple4reina Jun 4, 2025
0e35ee1
Add min_cold_start_trace_duration.
purple4reina Jun 4, 2025
863fa9e
Add local_test.
purple4reina Jun 4, 2025
3210240
Add cold_start_trace_skip_lib.
purple4reina Jun 4, 2025
a1e4993
Default should always be string.
purple4reina Jun 4, 2025
2dfd8ad
Parametrize the parametrize.
purple4reina Jun 4, 2025
adfe3ec
Add make_inferred_span.
purple4reina Jun 4, 2025
6308260
Add encode_authorizer_context.
purple4reina Jun 4, 2025
9a23c2a
Add decode_authorizer_context.
purple4reina Jun 4, 2025
831f368
Use config.cold_start_tracing.
purple4reina Jun 4, 2025
51eb5a7
Rm unused.
purple4reina Jun 4, 2025
c84b698
Black.
purple4reina Jun 4, 2025
e405282
Rename.
purple4reina Jun 4, 2025
2b2148c
reorder.
purple4reina Jun 4, 2025
740be3f
Clean up depends on tests.
purple4reina Jun 4, 2025
f492d30
Cold start already depends on tracing.
purple4reina Jun 4, 2025
23ac6b9
Testing for warning log.
purple4reina Jun 4, 2025
bbe0da4
More black.
purple4reina Jun 4, 2025
d8041b1
testing.x
purple4reina Jun 5, 2025
603c0e0
Debugging.
purple4reina Jun 5, 2025
92936f3
More debug.
purple4reina Jun 5, 2025
1c32c44
Undebug.
purple4reina Jun 5, 2025
20d733c
Fix multiple env accessing for same key.
purple4reina Jun 5, 2025
2957287
Add data_streams_enabled.
purple4reina Jun 10, 2025
76b8ce9
Shorter line.
purple4reina Jun 10, 2025
fad3d08
Fix tag_object.
purple4reina Jun 10, 2025
c90af27
Telemetry depends on tracing.
purple4reina Jun 10, 2025
43e31fc
Remove unneeded comment.
purple4reina Jun 10, 2025
7bc8751
Cut memory overhead.
purple4reina Jun 10, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions datadog_lambda/api.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import logging
import os

from datadog_lambda.fips import fips_mode_enabled
from datadog_lambda.config import config

logger = logging.getLogger(__name__)
KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
Expand Down Expand Up @@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
is added. We need to try decrypting the API key both with and without the encryption context.
"""
# Try without encryption context, in case API key was encrypted using the AWS CLI
function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
try:
plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
"Plaintext"
Expand All @@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
plaintext = kms_client.decrypt(
CiphertextBlob=decoded_bytes,
EncryptionContext={
KMS_ENCRYPTION_CONTEXT_KEY: function_name,
KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
},
)["Plaintext"].decode("utf-8")

Expand All @@ -66,7 +65,7 @@ def get_api_key() -> str:
DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))

LAMBDA_REGION = os.environ.get("AWS_REGION", "")
if fips_mode_enabled:
if config.fips_mode_enabled:
logger.debug(
"FIPS mode is enabled, using FIPS endpoints for secrets management."
)
Expand All @@ -82,7 +81,7 @@ def get_api_key() -> str:
return ""
endpoint_url = (
f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
secrets_manager_client = _boto3_client(
Expand All @@ -95,7 +94,7 @@ def get_api_key() -> str:
# SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
fips_endpoint = (
f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
Expand All @@ -106,7 +105,7 @@ def get_api_key() -> str:
# KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
fips_endpoint = (
f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
if fips_mode_enabled
if config.fips_mode_enabled
else None
)
kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
Expand All @@ -118,7 +117,7 @@ def get_api_key() -> str:


def init_api():
if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
if not config.flush_to_log:
# Make sure that this package would always be lazy-loaded/outside from the critical path
# since underlying packages are quite heavy to load
# and useless with the extension unless sending metrics with timestamps
Expand Down
12 changes: 3 additions & 9 deletions datadog_lambda/cold_start.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import time
import os
from typing import List, Hashable
import logging

from datadog_lambda.config import config

logger = logging.getLogger(__name__)

_cold_start = True
Expand Down Expand Up @@ -86,14 +87,12 @@ def reset_node_stacks():

def push_node(module_name, file_path):
node = ImportNode(module_name, file_path, time.time_ns())
global import_stack
if import_stack:
import_stack[-1].children.append(node)
import_stack.append(node)


def pop_node(module_name):
global import_stack
if not import_stack:
return
node = import_stack.pop()
Expand All @@ -102,7 +101,6 @@ def pop_node(module_name):
end_time_ns = time.time_ns()
node.end_time_ns = end_time_ns
if not import_stack: # import_stack empty, a root node has been found
global root_nodes
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These globals are not required because the affected objects are lists.

root_nodes.append(node)


Expand Down Expand Up @@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs):


def initialize_cold_start_tracing():
if (
is_new_sandbox()
and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
):
if is_new_sandbox() and config.cold_start_tracing:
from sys import meta_path

for importer in meta_path:
Expand Down
145 changes: 145 additions & 0 deletions datadog_lambda/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import logging
import os

logger = logging.getLogger(__name__)


def _get_env(key, default=None, cast=None, depends_on_tracing=False):
@property
def _getter(self):
if not hasattr(self, prop_key):
val = self._resolve_env(key, default, cast, depends_on_tracing)
setattr(self, prop_key, val)
return getattr(self, prop_key)

prop_key = f"_config_{key}"
return _getter


def as_bool(val):
    """Interpret an env-var string as a boolean.

    Only a case-insensitive "true" or the exact string "1" are truthy;
    everything else is falsy.
    """
    return val.lower() in ("true", "1")


def as_list(val):
    """Split a comma-separated env-var string into a list.

    Each element is whitespace-stripped; empty elements (including those
    produced by leading/trailing/doubled commas) are dropped.
    """
    # Strip once per element via the inner generator; the original
    # comprehension shadowed the parameter name and called strip() twice.
    return [item for item in (part.strip() for part in val.split(",")) if item]


class Config:
    """Lazily-resolved configuration sourced from environment variables.

    Each setting is declared with ``_get_env(...)``, which creates a
    property that reads its environment variable on first access and
    memoizes the result on the instance (under ``_config_<KEY>``).
    Settings declared with ``depends_on_tracing=True`` are forced to
    ``False`` whenever ``trace_enabled`` is off.  ``_reset()`` drops all
    cached values so tests can re-read the environment.
    """

    def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False):
        """Read ``key`` from the environment, optionally casting the value.

        If ``cast`` raises ``ValueError``/``TypeError``, a warning is
        logged and the (uncast) ``default`` is returned instead.
        """
        if depends_on_tracing and not self.trace_enabled:
            return False
        val = os.environ.get(key, default)
        if cast is not None:
            try:
                val = cast(val)
            except (ValueError, TypeError):
                msg = (
                    "Failed to cast environment variable '%s' with "
                    "value '%s' to type %s. Using default value '%s'."
                )
                logger.warning(msg, key, val, cast.__name__, default)
                val = default
        return val

    # Unified service tagging.
    service = _get_env("DD_SERVICE")
    env = _get_env("DD_ENV")

    # Cold-start tracing.
    cold_start_tracing = _get_env(
        "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True
    )
    min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int)
    cold_start_trace_skip_lib = _get_env(
        "DD_COLD_START_TRACE_SKIP_LIB",
        "ddtrace.internal.compat,ddtrace.filters",
        as_list,
    )

    # Lambda payload capture.
    capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int)
    capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool)

    # Tracing.
    trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)
    make_inferred_span = _get_env(
        "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True
    )
    encode_authorizer_context = _get_env(
        "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
    )
    decode_authorizer_context = _get_env(
        "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
    )
    add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
    trace_extractor = _get_env("DD_TRACE_EXTRACTOR")

    # Metrics.
    enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)

    # Flushing / log forwarding.
    flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
    flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
    logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool)
    merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool)

    # Optional products.
    telemetry_enabled = _get_env(
        "DD_INSTRUMENTATION_TELEMETRY_ENABLED",
        "false",
        as_bool,
        depends_on_tracing=True,
    )
    otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
    profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool)
    llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool)
    exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool)
    data_streams_enabled = _get_env(
        "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
    )

    # True when running in an AWS GovCloud region (region name "us-gov-*").
    is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))

    # Test harness toggles.
    local_test = _get_env("DD_LOCAL_TEST", "false", as_bool)
    integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool)

    aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")

    @property
    def function_name(self):
        """The Lambda function name, falling back to "function" when unset."""
        if not hasattr(self, "_config_function_name"):
            if self.aws_lambda_function_name is None:
                self._config_function_name = "function"
            else:
                self._config_function_name = self.aws_lambda_function_name
        return self._config_function_name

    @property
    def is_lambda_context(self):
        """True when running inside AWS Lambda (function-name env var set)."""
        if not hasattr(self, "_config_is_lambda_context"):
            self._config_is_lambda_context = bool(self.aws_lambda_function_name)
        return self._config_is_lambda_context

    @property
    def fips_mode_enabled(self):
        """Whether FIPS endpoints should be used.

        Unlike the other settings this cannot use _get_env because its
        default is dynamic: FIPS mode defaults to on in GovCloud regions.
        """
        if not hasattr(self, "_config_fips_mode_enabled"):
            self._config_fips_mode_enabled = (
                os.environ.get(
                    "DD_LAMBDA_FIPS_MODE",
                    "true" if self.is_gov_region else "false",
                ).lower()
                == "true"
            )
        return self._config_fips_mode_enabled

    def _reset(self):
        """Drop every memoized value so the environment is re-read (tests)."""
        for attr in dir(self):
            if attr.startswith("_config_"):
                delattr(self, attr)

# Singleton configuration instance shared across the package.
config = Config()

# Log the FIPS-mode status once at import time; FIPS defaults to enabled
# in AWS GovCloud regions (see Config.fips_mode_enabled).
if config.is_gov_region or config.fips_mode_enabled:
    logger.debug(
        "Python Lambda Layer FIPS mode is %s.",
        "enabled" if config.fips_mode_enabled else "not enabled",
    )
19 changes: 0 additions & 19 deletions datadog_lambda/fips.py

This file was deleted.

17 changes: 5 additions & 12 deletions datadog_lambda/metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,13 @@

import enum
import logging
import os
import time
from datetime import datetime, timedelta

import ujson as json

from datadog_lambda.config import config
from datadog_lambda.extension import should_use_extension
from datadog_lambda.fips import fips_mode_enabled
from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags

logger = logging.getLogger(__name__)
Expand All @@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
def _select_metrics_handler():
if should_use_extension:
return MetricsHandler.EXTENSION
if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
if config.flush_to_log:
return MetricsHandler.FORWARDER

if fips_mode_enabled:
if config.fips_mode_enabled:
logger.debug(
"With FIPS mode enabled, the Datadog API metrics handler is unavailable."
)
Expand All @@ -58,14 +57,8 @@ def _select_metrics_handler():
from datadog_lambda.api import init_api
from datadog_lambda.thread_stats_writer import ThreadStatsWriter

flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
init_api()
lambda_stats = ThreadStatsWriter(flush_in_thread)


enhanced_metrics_enabled = (
os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
)
lambda_stats = ThreadStatsWriter(config.flush_in_thread)


def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
Expand Down Expand Up @@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
lambda_context (object): Lambda context dict passed to the function by AWS
"""
if not enhanced_metrics_enabled:
if not config.enhanced_metrics_enabled:
logger.debug(
"Not submitting enhanced metric %s because enhanced metrics are disabled",
metric_name,
Expand Down
12 changes: 4 additions & 8 deletions datadog_lambda/patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import os
import sys
import logging
import zlib
Expand All @@ -13,10 +12,8 @@
from wrapt.importer import when_imported
from ddtrace import patch_all as patch_all_dd

from datadog_lambda.tracing import (
get_dd_trace_context,
dd_tracing_enabled,
)
from datadog_lambda.config import config
from datadog_lambda.tracing import get_dd_trace_context
from collections.abc import MutableMapping

logger = logging.getLogger(__name__)
Expand All @@ -32,7 +29,7 @@ def patch_all():
"""
_patch_for_integration_tests()

if dd_tracing_enabled:
if config.trace_enabled:
patch_all_dd()
else:
_patch_http()
Expand All @@ -44,8 +41,7 @@ def _patch_for_integration_tests():
Patch `requests` to log the outgoing requests for integration tests.
"""
global _integration_tests_patched
is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
if not _integration_tests_patched and is_in_tests:
if not _integration_tests_patched and config.integration_test:
wrap("requests", "Session.send", _log_request)
_integration_tests_patched = True

Expand Down
Loading
Loading