Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 60 additions & 0 deletions aws_lambda_powertools/logging/buffer/handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from aws_lambda_powertools.logging.buffer.cache import LoggerBufferCache
from aws_lambda_powertools.logging.buffer.config import LoggerBufferConfig
from aws_lambda_powertools.logging.logger import Logger


class BufferingHandler(logging.Handler):
    """
    Handler that buffers logs from external libraries using the source logger's buffer.

    The handler intercepts log records from external libraries and
    stores them in the source logger's buffer using the same tracer_id mechanism.
    Logs are flushed when an error occurs or when explicitly requested.
    """

    def __init__(
        self,
        buffer_cache: LoggerBufferCache,
        buffer_config: LoggerBufferConfig,
        source_logger: Logger,
    ):
        """
        Initialize the BufferingHandler.

        Parameters
        ----------
        buffer_cache : LoggerBufferCache
            Shared buffer cache from the source logger
        buffer_config : LoggerBufferConfig
            Buffer configuration from the source logger
        source_logger : Logger
            The Powertools Logger instance to delegate buffering logic to
        """
        super().__init__()
        self.buffer_cache = buffer_cache
        self.buffer_config = buffer_config
        self.source_logger = source_logger

    def emit(self, record: logging.LogRecord) -> None:
        """
        Buffer the log record by delegating to source logger's buffering logic.
        Call source logger to add a structured record to the buffer.

        Any exception raised while buffering is routed to ``handleError``
        instead of propagating into the caller's logging call, matching the
        stdlib ``logging.Handler.emit`` contract.

        Parameters
        ----------
        record : logging.LogRecord
            The log record from an external logger
        """
        try:
            # Pre-format the message (record args already applied) so the
            # buffered entry does not keep a reference to the args tuple.
            self.source_logger._add_log_record_to_buffer(
                level=record.levelno,
                msg=record.getMessage(),
                args=(),
                exc_info=record.exc_info,
                stack_info=False,
            )
        except Exception:
            # Defensive: never let a buffering failure break application code.
            self.handleError(record)
25 changes: 24 additions & 1 deletion aws_lambda_powertools/logging/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import logging
from typing import TYPE_CHECKING

from aws_lambda_powertools.logging.buffer.handler import BufferingHandler

if TYPE_CHECKING:
from collections.abc import Callable

Expand All @@ -16,6 +18,7 @@ def copy_config_to_registered_loggers(
source_logger: Logger,
log_level: int | str | None = None,
ignore_log_level=False,
include_buffering=False,
exclude: set[str] | None = None,
include: set[str] | None = None,
) -> None:
Expand All @@ -30,6 +33,8 @@ def copy_config_to_registered_loggers(
Logging level to set to registered loggers, by default uses source_logger logging level
ignore_log_level: bool
Whether to not touch log levels for discovered loggers. log_level param is disregarded when this is set.
include_buffering: bool
Whether to buffer logs from external libraries and report to powertools logger
include : set[str] | None, optional
List of logger names to include, by default all registered loggers are included
exclude : set[str] | None, optional
Expand Down Expand Up @@ -64,7 +69,13 @@ def copy_config_to_registered_loggers(

registered_loggers = _find_registered_loggers(loggers=loggers, filter_func=filter_func)
for logger in registered_loggers:
_configure_logger(source_logger=source_logger, logger=logger, level=level, ignore_log_level=ignore_log_level)
_configure_logger(
source_logger=source_logger,
logger=logger,
level=level,
ignore_log_level=ignore_log_level,
include_buffering=include_buffering,
)


def _include_registered_loggers_filter(loggers: set[str]):
Expand Down Expand Up @@ -92,6 +103,7 @@ def _configure_logger(
logger: logging.Logger,
level: int | str,
ignore_log_level: bool = False,
include_buffering: bool = False,
) -> None:
# customers may not want to copy the same log level from Logger to discovered loggers
if not ignore_log_level:
Expand All @@ -102,6 +114,17 @@ def _configure_logger(
logger.propagate = False # ensure we don't propagate logs to existing loggers, #1073
source_logger.append_keys(name="%(name)s") # include logger name, see #1267

buffer_config = getattr(source_logger, "_buffer_config", None)
if include_buffering and buffer_config is not None:
buffer_handler = BufferingHandler(
buffer_cache=source_logger._buffer_cache,
buffer_config=buffer_config,
source_logger=source_logger,
)
logger.addHandler(buffer_handler)
LOGGER.debug(f"Logger {logger} configured with BufferingHandler")
return # exit earlier and don't add source handlers, would cause double logging

for source_handler in source_logger.handlers:
logger.addHandler(source_handler)
LOGGER.debug(f"Logger {logger} reconfigured to use {source_handler}")
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,14 @@
import random
import string
from enum import Enum
from unittest.mock import patch

import pytest

from aws_lambda_powertools import Logger
from aws_lambda_powertools.logging import formatter, utils
from aws_lambda_powertools.logging.buffer import LoggerBufferConfig
from aws_lambda_powertools.logging.buffer.handler import BufferingHandler


@pytest.fixture
Expand Down Expand Up @@ -315,3 +318,84 @@ def test_copy_config_to_ext_loggers_but_preserve_log_levels(stdout, logger, log_
# THEN external loggers log levels should be preserved
assert logger_1.level != powertools_logger.log_level
assert logger_2.level != powertools_logger.log_level


def test_copy_config_with_buffering_uses_buffering_handler(stdout, logger, log_level):
    # GIVEN a Powertools logger configured with a buffer and a plain external logger
    external = logger()
    powertools_logger = Logger(
        service=service_name(),
        level=log_level.INFO.value,
        stream=stdout,
        buffer_config=LoggerBufferConfig(max_bytes=10240),
    )

    # WHEN we propagate configuration with include_buffering=True
    utils.copy_config_to_registered_loggers(
        source_logger=powertools_logger,
        include_buffering=True,
        include={external.name},
    )

    # THEN the external logger carries a single BufferingHandler (never a StreamHandler)
    handlers = external.handlers
    assert len(handlers) == 1
    sole_handler = handlers[0]
    assert isinstance(sole_handler, BufferingHandler)
    assert not isinstance(sole_handler, logging.StreamHandler)

    with patch("aws_lambda_powertools.logging.logger.get_tracer_id", return_value="test-trace-id"):
        # AND a record emitted under a mocked tracer id is held in the buffer
        external.info("Test message from external logger")
        assert "Test message from external logger" not in stdout.getvalue()

        # AND flushing the buffer makes the record visible in the output stream
        powertools_logger.flush_buffer()
        assert "Test message from external logger" in stdout.getvalue()


def test_copy_config_buffering_without_config_uses_normal_handlers(stdout, logger, log_level):
    # GIVEN a Powertools logger with no buffer configuration and an external logger
    external = logger()
    powertools_logger = Logger(
        service=service_name(),
        level=log_level.INFO.value,
        stream=stdout,
    )

    # WHEN configuration is copied while still requesting buffering
    utils.copy_config_to_registered_loggers(
        source_logger=powertools_logger,
        include_buffering=True,
        include={external.name},
    )

    # THEN buffering is silently skipped and the regular Powertools handler is attached
    handlers = external.handlers
    assert len(handlers) == 1
    sole_handler = handlers[0]
    assert not isinstance(sole_handler, BufferingHandler)
    assert isinstance(sole_handler, logging.StreamHandler)
    assert isinstance(sole_handler.formatter, formatter.LambdaPowertoolsFormatter)


def test_copy_config_to_ext_loggers_default_include_buffering_false(stdout, logger, log_level):
    # GIVEN an external logger and a Powertools logger that does have a buffer_config
    external = logger()
    powertools_logger = Logger(
        service=service_name(),
        level=log_level.INFO.value,
        stream=stdout,
        buffer_config=LoggerBufferConfig(max_bytes=10240),
    )

    # WHEN configuration is copied relying on the default include_buffering=False
    utils.copy_config_to_registered_loggers(
        source_logger=powertools_logger,
        include={external.name},
    )

    # THEN the external logger still receives the plain StreamHandler, not a BufferingHandler
    handlers = external.handlers
    assert len(handlers) == 1
    sole_handler = handlers[0]
    assert isinstance(sole_handler, logging.StreamHandler)
    assert not isinstance(sole_handler, BufferingHandler)
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
from __future__ import annotations

import io
import json
import logging

from aws_lambda_powertools import Logger
from aws_lambda_powertools.logging.buffer.config import LoggerBufferConfig
from aws_lambda_powertools.logging.buffer.handler import BufferingHandler
from aws_lambda_powertools.shared import constants


def test_buffering_handler_init_stores_dependencies():
    # GIVEN a buffered Powertools logger plus its real cache and config objects
    config = LoggerBufferConfig(max_bytes=10240)
    powertools_logger = Logger(service="test1", buffer_config=config, stream=io.StringIO())
    cache = powertools_logger._buffer_cache

    # WHEN a BufferingHandler is constructed from those pieces
    handler = BufferingHandler(
        buffer_cache=cache,
        buffer_config=config,
        source_logger=powertools_logger,
    )

    # THEN each dependency is kept by reference and the handler level defaults to NOTSET
    assert handler.buffer_cache is cache
    assert handler.buffer_config is config
    assert handler.source_logger is powertools_logger
    assert handler.level == logging.NOTSET


def test_buffering_handler_emit_calls_add_log_record_to_buffer(monkeypatch):
    # GIVEN a tracer id in the environment so the Logger actually buffers records
    monkeypatch.setenv(constants.XRAY_TRACE_ID_ENV, "1-67c39786-5908a82a246fb67f3089263f")
    out = io.StringIO()
    config = LoggerBufferConfig(max_bytes=10240)
    powertools_logger = Logger(service="test2", buffer_config=config, stream=out)

    # AND a BufferingHandler wired to that logger's buffer internals
    handler = BufferingHandler(
        buffer_cache=powertools_logger._buffer_cache,
        buffer_config=powertools_logger._buffer_config,
        source_logger=powertools_logger,
    )

    # AND a synthetic record from an external logger with printf-style args
    record = logging.LogRecord(
        name="external",
        level=logging.INFO,
        pathname="",
        lineno=0,
        msg="test %s",
        args=("arg",),
        exc_info=None,
        func=None,
    )
    record.stack_info = None

    # WHEN the record is emitted and the buffer flushed
    handler.emit(record)
    powertools_logger.flush_buffer()

    # THEN the fully formatted message is present in the JSON output
    payload = json.loads(out.getvalue().strip())
    assert payload["message"] == "test arg"