Skip to content

feat: logger inheritance #99

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -5,6 +5,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Added
- **Logger**: Support for logger inheritance with `child` parameter

### Fixed
- **Logger**: Log level is now case insensitive via params and env var

## [1.0.2] - 2020-07-16
### Fixed
3 changes: 3 additions & 0 deletions aws_lambda_powertools/logging/formatter.py
Original file line number Diff line number Diff line change
@@ -56,6 +56,9 @@ def __init__(self, **kwargs):
self.format_dict.update(kwargs)
self.default_json_formatter = kwargs.pop("json_default", json_formatter)

def update_formatter(self, **kwargs):
    """Merge ``kwargs`` into the keys rendered on every log record.

    Existing keys are overwritten and new keys are appended. This lets a
    caller mutate an already-attached formatter in place (used by
    ``Logger.structure_logs(append=True)``) instead of replacing it with
    a brand new ``JsonFormatter``.
    """
    self.format_dict.update(kwargs)

def format(self, record): # noqa: A003
record_dict = record.__dict__.copy()
record_dict["asctime"] = self.formatTime(record, self.datefmt)
118 changes: 85 additions & 33 deletions aws_lambda_powertools/logging/logger.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import copy
import functools
import inspect
import logging
import os
import random
@@ -34,7 +34,7 @@ def _is_cold_start() -> bool:
return cold_start


class Logger(logging.Logger):
class Logger:
"""Creates and setups a logger to format statements in JSON.

Includes service name and any additional key=value into logs
@@ -55,6 +55,8 @@ class Logger(logging.Logger):
service name to be appended in logs, by default "service_undefined"
level : str, optional
logging.level, by default "INFO"
child: bool, optional
create a child Logger named <service>.<caller_file_name>, False by default
sample_rate: float, optional
sample rate for debug calls within execution context defaults to 0.0
stream: sys.stdout, optional
@@ -80,7 +82,7 @@ class Logger(logging.Logger):
>>> def handler(event, context):
logger.info("Hello")

**Append payment_id to previously setup structured log logger**
**Append payment_id to previously setup logger**

>>> from aws_lambda_powertools import Logger
>>> logger = Logger(service="payment")
@@ -89,18 +91,16 @@ class Logger(logging.Logger):
logger.structure_logs(append=True, payment_id=event["payment_id"])
logger.info("Hello")

Parameters
----------
logging : logging.Logger
Inherits Logger
service: str
name of the service to create the logger for, "service_undefined" by default
level: str, int
log level, INFO by default
sampling_rate: float
debug log sampling rate, 0.0 by default
stream: sys.stdout
log stream, stdout by default
**Create child Logger using logging inheritance via child param**

>>> # app.py
>>> import another_file
>>> from aws_lambda_powertools import Logger
>>> logger = Logger(service="payment")
>>>
>>> # another_file.py
>>> from aws_lambda_powertools import Logger
>>> logger = Logger(service="payment", child=True)

Raises
------
@@ -112,19 +112,72 @@ def __init__(
self,
service: str = None,
level: Union[str, int] = None,
child: bool = False,
sampling_rate: float = None,
stream: sys.stdout = None,
**kwargs,
):
self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined"
self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or 0.0
self.log_level = level or os.getenv("LOG_LEVEL") or logging.INFO
self.handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout)
self.log_level = self._get_log_level(level)
self.child = child
self._handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout)
self._default_log_keys = {"service": self.service, "sampling_rate": self.sampling_rate}
self.log_keys = copy.copy(self._default_log_keys)

super().__init__(name=self.service, level=self.log_level)

self._logger = self._get_logger()

self._init_logger(**kwargs)

def __getattr__(self, name):
    # Proxy attribute lookups that miss on this wrapper to the wrapped
    # logging.Logger so existing call sites (logger.info, logger.level, ...)
    # keep working now that Logger no longer subclasses logging.Logger.
    # https://github.com/awslabs/aws-lambda-powertools-python/issues/97
    # NOTE(review): if "_logger" is ever absent from the instance (e.g. an
    # exception before it is assigned in __init__), this lookup re-enters
    # __getattr__ and recurses infinitely — confirm and guard if needed.
    return getattr(self._logger, name)

def _get_log_level(self, level: Union[str, int]) -> Union[str, int]:
    """Resolve the preferred log level, normalized for ``logging``.

    Precedence: explicit ``level`` argument, then the ``LOG_LEVEL``
    environment variable, then ``logging.INFO``.

    Parameters
    ----------
    level : str or int, optional
        Level name in any case (e.g. "info") or a numeric level such as
        ``logging.DEBUG``.

    Returns
    -------
    str or int
        Upper-cased level name, a numeric level passed through unchanged,
        or ``logging.INFO`` when nothing was provided.
    """
    # __init__ documents level as Union[str, int]; numeric levels have no
    # .upper(), so pass them through untouched instead of crashing.
    if isinstance(level, int):
        return level

    log_level = level or os.getenv("LOG_LEVEL")
    if log_level is None:
        return logging.INFO

    # logging only recognizes upper-case level names ("INFO", not "info")
    return log_level.upper()

def _get_logger(self):
    """Return the underlying logging.Logger for this wrapper.

    Parent loggers are named ``{service}``; child loggers are named
    ``{service}.{caller filename}`` so they hook into the standard
    ``logging`` dotted-name hierarchy.
    """
    if self.child:
        return logging.getLogger(f"{self.service}.{self._get_caller_filename()}")

    return logging.getLogger(self.service)

def _get_caller_filename(self):
    """ Return caller filename by finding the caller frame """
    # Frame chain when invoked via __init__ -> _get_logger():
    #   current frame         => _get_caller_filename()
    #   f_back                => _get_logger()
    #   f_back.f_back         => Logger.__init__()
    #   f_back.f_back.f_back  => module that instantiated Logger (the caller)
    # NOTE(review): only valid for that exact call chain; calling this from
    # anywhere else walks to the wrong frame — confirm before reusing.
    frame = inspect.currentframe()
    caller_frame = frame.f_back.f_back.f_back
    # The caller module's __name__, e.g. "another_file" for the docs example
    filename = caller_frame.f_globals["__name__"]

    return filename

def _init_logger(self, **kwargs):
    """Configure the freshly-resolved logger (level, sampling, handler, keys).

    Parameters
    ----------
    **kwargs
        Extra structured-log keys forwarded to ``structure_logs``.
    """

    # Skip configuration if it's a child logger to prevent
    # multiple handlers being attached as well as different sampling mechanisms
    # and multiple messages from being logged as handlers can be duplicated:
    # a child relies entirely on its parent's handlers via the logging hierarchy.
    if not self.child:
        self._configure_sampling()
        self._logger.setLevel(self.log_level)
        self._logger.addHandler(self._handler)
        self.structure_logs(**kwargs)

def _configure_sampling(self):
"""Dynamically set log level based on sampling rate

Raises
------
InvalidLoggerSamplingRateError
When sampling rate provided is not a float
"""
try:
if self.sampling_rate and random.random() <= float(self.sampling_rate):
logger.debug("Setting log level to Debug due to sampling rate")
@@ -134,12 +187,8 @@ def __init__(
f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501
)

self.setLevel(self.log_level)
self.structure_logs(**kwargs)
self.addHandler(self.handler)

def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False):
"""Decorator to capture Lambda contextual info and inject into struct logging
"""Decorator to capture Lambda contextual info and inject into logger

Parameters
----------
@@ -216,21 +265,24 @@ def structure_logs(self, append: bool = False, **kwargs):
append : bool, optional
[description], by default False
"""
self.handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs))

if append:
new_keys = {**self.log_keys, **kwargs}
self.handler.setFormatter(JsonFormatter(**new_keys))

self.log_keys.update(**kwargs)
# Child loggers don't have handlers attached, use its parent handlers
handlers = self._logger.parent.handlers if self.child else self._logger.handlers
for handler in handlers:
if append:
# Update existing formatter in an existing logger handler
handler.formatter.update_formatter(**kwargs)
else:
# Set a new formatter for a logger handler
handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs))


def set_package_logger(
level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None
):
"""Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger.

**Package log by default is supressed (NullHandler), this should only used for debugging.
**Package log by default is suppressed (NullHandler), this should only used for debugging.
This is separate from application Logger class utility**

Example
35 changes: 35 additions & 0 deletions docs/content/core/logger.mdx
Original file line number Diff line number Diff line change
@@ -183,6 +183,41 @@ def handler(event, context):
```
</details>

## Reusing Logger across your code

Logger supports inheritance via `child` parameter. This allows you to create multiple Loggers across your code base, and propagate changes such as new keys to all Loggers.

```python:title=collect.py
# POWERTOOLS_SERVICE_NAME: "payment"
import shared # Creates a child logger named "payment.shared"
from aws_lambda_powertools import Logger

logger = Logger()

def handler(event, context):
shared.inject_payment_id(event) # highlight-line
logger.structure_logs(append=True, order_id=event["order_id"]) # highlight-line
...
```

```python:title=shared.py
# POWERTOOLS_SERVICE_NAME: "payment"
from aws_lambda_powertools import Logger

logger = Logger(child=True) # highlight-line

def inject_payment_id(event):
logger.structure_logs(append=True, payment_id=event["payment_id"])
```

In this example, `Logger` will create a parent logger named `payment` and a child logger named `payment.shared`. Any changes in the parent and child `Loggers` will be propagated among them.

If you ever forget to use `child` param, we will return an existing `Logger` with the same `service` name.

<Note type="info">
Child loggers will be named after the following convention <code>service.filename</code>.
</Note><br/>

## Sampling debug logs

You can dynamically set a percentage of your logs to **DEBUG** level using `sample_rate` param or via env var `POWERTOOLS_LOGGER_SAMPLE_RATE`.
34 changes: 6 additions & 28 deletions tests/functional/test_aws_lambda_logging.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""aws_lambda_logging tests."""
import io
import json
import logging

import pytest

@@ -13,25 +12,8 @@ def stdout():
return io.StringIO()


@pytest.fixture
def handler(stdout):
return logging.StreamHandler(stdout)


@pytest.fixture
def logger():
return logging.getLogger(__name__)


@pytest.fixture
def root_logger(handler):
logging.root.addHandler(handler)
yield logging.root
logging.root.removeHandler(handler)


@pytest.mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"])
def test_setup_with_valid_log_levels(root_logger, stdout, level):
def test_setup_with_valid_log_levels(stdout, level):
logger = Logger(level=level, stream=stdout, request_id="request id!", another="value")
msg = "This is a test"
log_command = {
@@ -55,7 +37,7 @@ def test_setup_with_valid_log_levels(root_logger, stdout, level):
assert "exception" not in log_dict


def test_logging_exception_traceback(root_logger, stdout):
def test_logging_exception_traceback(stdout):
logger = Logger(level="DEBUG", stream=stdout, request_id="request id!", another="value")

try:
@@ -69,7 +51,7 @@ def test_logging_exception_traceback(root_logger, stdout):
assert "exception" in log_dict


def test_setup_with_invalid_log_level(root_logger, logger, stdout):
def test_setup_with_invalid_log_level(stdout):
with pytest.raises(ValueError) as e:
Logger(level="not a valid log level")
assert "Unknown level" in e.value.args[0]
@@ -82,11 +64,7 @@ def check_log_dict(log_dict):
assert "message" in log_dict


def test_setup_with_bad_level_does_not_fail():
Logger("DBGG", request_id="request id!", another="value")


def test_with_dict_message(root_logger, stdout):
def test_with_dict_message(stdout):
logger = Logger(level="DEBUG", stream=stdout)

msg = {"x": "isx"}
@@ -97,7 +75,7 @@ def test_with_dict_message(root_logger, stdout):
assert msg == log_dict["message"]


def test_with_json_message(root_logger, stdout):
def test_with_json_message(stdout):
logger = Logger(stream=stdout)

msg = {"x": "isx"}
@@ -108,7 +86,7 @@ def test_with_json_message(root_logger, stdout):
assert msg == log_dict["message"]


def test_with_unserialisable_value_in_message(root_logger, stdout):
def test_with_unserialisable_value_in_message(stdout):
logger = Logger(level="DEBUG", stream=stdout)

class X:
73 changes: 57 additions & 16 deletions tests/functional/test_logger.py
Original file line number Diff line number Diff line change
@@ -15,18 +15,6 @@ def stdout():
return io.StringIO()


@pytest.fixture
def handler(stdout):
return logging.StreamHandler(stdout)


@pytest.fixture
def root_logger(handler):
logging.root.addHandler(handler)
yield logging.root
logging.root.removeHandler(handler)


@pytest.fixture
def lambda_context():
lambda_context = {
@@ -45,14 +33,14 @@ def lambda_event():


def capture_logging_output(stdout):
return json.loads(stdout.getvalue())
return json.loads(stdout.getvalue().strip())


def capture_multiple_logging_statements_output(stdout):
return [json.loads(line.strip()) for line in stdout.getvalue().split("\n") if line]


def test_setup_service_name(root_logger, stdout):
def test_setup_service_name(stdout):
service_name = "payment"
# GIVEN Logger is initialized
# WHEN service is explicitly defined
@@ -96,7 +84,7 @@ def test_setup_sampling_rate_env_var(monkeypatch, stdout):
# WHEN sampling rate is explicitly set to 100% via POWERTOOLS_LOGGER_SAMPLE_RATE env
sampling_rate = "1"
monkeypatch.setenv("POWERTOOLS_LOGGER_SAMPLE_RATE", sampling_rate)
logger = Logger(stream=stdout, level="INFO")
logger = Logger(stream=stdout)
logger.debug("I am being sampled")

# THEN sampling rate should be equals POWERTOOLS_LOGGER_SAMPLE_RATE value
@@ -255,7 +243,7 @@ def test_logger_invalid_sampling_rate():
# WHEN sampling_rate non-numeric value
# THEN we should raise InvalidLoggerSamplingRateError
with pytest.raises(InvalidLoggerSamplingRateError):
Logger(sampling_rate="TEST")
Logger(stream=stdout, sampling_rate="TEST")


def test_inject_lambda_context_with_structured_log(lambda_context, stdout):
@@ -283,3 +271,56 @@ def handler(event, context):
)
for key in expected_logger_context_keys:
assert key in log


def test_logger_children_propagate_changes(stdout):
    # GIVEN a child Logger is created before its parent, mimicking a module
    # that builds its logger at import time in global scope
    child_logger = Logger(stream=stdout, service="order", child=True)
    parent_logger = Logger(stream=stdout, service="order")

    # WHEN the child Logger appends an additional key
    child_logger.structure_logs(append=True, customer_id="value")

    # THEN the change should propagate to the parent as well,
    # and subsequent log statements from both should carry the latest value
    parent_logger.info("Hello parent")
    child_logger.info("Hello child")

    parent_log, child_log = capture_multiple_logging_statements_output(stdout)
    assert "customer_id" in parent_log
    assert "customer_id" in child_log
    assert child_logger.parent.name == "order"


def test_logger_child_not_set_returns_same_logger(stdout):
    # GIVEN two Loggers are initialized with the same service name
    # WHEN child param isn't set
    logger_one = Logger(service="something")
    logger_two = Logger(service="something")

    # THEN we should have two Logger wrapper instances
    # however the inner logging.Logger should be the very same object,
    # since logging.getLogger caches by name
    assert id(logger_one) != id(logger_two)
    assert logger_one._logger is logger_two._logger
    # Compare names by equality, not identity: `is` on strings only works
    # here by accident of interning/shared objects
    assert logger_one.name == logger_two.name


def test_logger_level_case_insensitive(stdout):
    # GIVEN a Logger is initialized
    # WHEN the log level is supplied in lower case ("info" rather than "INFO")
    lowercase_level = "info"
    logger = Logger(level=lowercase_level)

    # THEN the level should still resolve to INFO
    assert logger.level == logging.INFO


def test_logger_level_not_set(stdout):
    # GIVEN a Logger is initialized
    # WHEN no log level is passed explicitly
    # (previously this test passed level="info", which never exercised the
    # default path; assumes LOG_LEVEL env var is unset in the test run)
    logger = Logger()

    # THEN we should default to INFO
    assert logger.level == logging.INFO