diff --git a/aws_lambda_powertools/logging/buffer/handler.py b/aws_lambda_powertools/logging/buffer/handler.py
new file mode 100644
index 00000000000..65802504c9a
--- /dev/null
+++ b/aws_lambda_powertools/logging/buffer/handler.py
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from aws_lambda_powertools.logging.buffer.cache import LoggerBufferCache
+    from aws_lambda_powertools.logging.buffer.config import LoggerBufferConfig
+    from aws_lambda_powertools.logging.logger import Logger
+
+
+class BufferingHandler(logging.Handler):
+    """
+    Handler that buffers logs from external libraries using the source logger's buffer.
+
+    The handler intercepts log records from external libraries and
+    stores them in the source logger's buffer using the same tracer_id mechanism.
+    Logs are flushed when an error occurs or when explicitly requested.
+    """
+
+    def __init__(
+        self,
+        buffer_cache: LoggerBufferCache,
+        buffer_config: LoggerBufferConfig,
+        source_logger: Logger,
+    ):
+        """
+        Initialize the BufferingHandler.
+
+        Parameters
+        ----------
+        buffer_cache : LoggerBufferCache
+            Shared buffer cache from the source logger
+        buffer_config : LoggerBufferConfig
+            Buffer configuration from the source logger
+        source_logger : Logger
+            The Powertools Logger instance to delegate buffering logic to
+        """
+        super().__init__()
+        self.buffer_cache = buffer_cache
+        self.buffer_config = buffer_config
+        self.source_logger = source_logger
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """
+        Buffer the log record by delegating to the source logger's buffering logic,
+        which adds a structured record to the shared buffer.
+
+        Parameters
+        ----------
+        record : logging.LogRecord
+            The log record from an external logger
+        """
+        self.source_logger._add_log_record_to_buffer(
+            level=record.levelno,
+            msg=record.getMessage(),
+            args=(),
+            exc_info=record.exc_info,
+            stack_info=False,
+        )
diff --git a/aws_lambda_powertools/logging/utils.py b/aws_lambda_powertools/logging/utils.py
index 91a683ee0ce..db469d71ebd 100644
--- a/aws_lambda_powertools/logging/utils.py
+++ b/aws_lambda_powertools/logging/utils.py
@@ -3,6 +3,8 @@
 import logging
 from typing import TYPE_CHECKING
 
+from aws_lambda_powertools.logging.buffer.handler import BufferingHandler
+
 if TYPE_CHECKING:
     from collections.abc import Callable
 
@@ -16,6 +18,7 @@ def copy_config_to_registered_loggers(
     source_logger: Logger,
     log_level: int | str | None = None,
     ignore_log_level=False,
+    include_buffering=False,
     exclude: set[str] | None = None,
     include: set[str] | None = None,
 ) -> None:
@@ -30,6 +33,8 @@ def copy_config_to_registered_loggers(
         Logging level to set to registered loggers, by default uses source_logger logging level
     ignore_log_level: bool
         Whether to not touch log levels for discovered loggers. log_level param is disregarded when this is set.
+    include_buffering: bool
+        Whether to buffer logs from external libraries in the Powertools Logger buffer instead of emitting them directly
     include : set[str] | None, optional
         List of logger names to include, by default all registered loggers are included
     exclude : set[str] | None, optional
@@ -64,7 +69,13 @@ def copy_config_to_registered_loggers(
 
     registered_loggers = _find_registered_loggers(loggers=loggers, filter_func=filter_func)
     for logger in registered_loggers:
-        _configure_logger(source_logger=source_logger, logger=logger, level=level, ignore_log_level=ignore_log_level)
+        _configure_logger(
+            source_logger=source_logger,
+            logger=logger,
+            level=level,
+            ignore_log_level=ignore_log_level,
+            include_buffering=include_buffering,
+        )
 
 
 def _include_registered_loggers_filter(loggers: set[str]):
@@ -92,6 +103,7 @@ def _configure_logger(
     logger: logging.Logger,
     level: int | str,
     ignore_log_level: bool = False,
+    include_buffering: bool = False,
 ) -> None:
     # customers may not want to copy the same log level from Logger to discovered loggers
     if not ignore_log_level:
@@ -102,6 +114,17 @@ def _configure_logger(
     logger.propagate = False  # ensure we don't propagate logs to existing loggers, #1073
     source_logger.append_keys(name="%(name)s")  # include logger name, see #1267
 
+    buffer_config = getattr(source_logger, "_buffer_config", None)
+    if include_buffering and buffer_config is not None:
+        buffer_handler = BufferingHandler(
+            buffer_cache=source_logger._buffer_cache,
+            buffer_config=buffer_config,
+            source_logger=source_logger,
+        )
+        logger.addHandler(buffer_handler)
+        LOGGER.debug(f"Logger {logger} configured with BufferingHandler")
+        return  # exit early and skip adding source handlers, which would cause double logging
+
     for source_handler in source_logger.handlers:
         logger.addHandler(source_handler)
         LOGGER.debug(f"Logger {logger} reconfigured to use {source_handler}")
diff --git a/tests/functional/logger/required_dependencies/test_logger_utils.py b/tests/functional/logger/required_dependencies/test_logger_utils.py
index f0a2baf3cf4..2904e727a28 100644
--- a/tests/functional/logger/required_dependencies/test_logger_utils.py
+++ b/tests/functional/logger/required_dependencies/test_logger_utils.py
@@ -6,11 +6,14 @@
 import random
 import string
 from enum import Enum
+from unittest.mock import patch
 
 import pytest
 
 from aws_lambda_powertools import Logger
 from aws_lambda_powertools.logging import formatter, utils
+from aws_lambda_powertools.logging.buffer import LoggerBufferConfig
+from aws_lambda_powertools.logging.buffer.handler import BufferingHandler
 
 
 @pytest.fixture
@@ -315,3 +318,84 @@ def test_copy_config_to_ext_loggers_but_preserve_log_levels(stdout, logger, log_
     # THEN external loggers log levels should be preserved
     assert logger_1.level != powertools_logger.log_level
     assert logger_2.level != powertools_logger.log_level
+
+
+def test_copy_config_with_buffering_uses_buffering_handler(stdout, logger, log_level):
+    # GIVEN an external logger and Powertools logger with buffer_config
+    ext_logger = logger()
+    buffer_config = LoggerBufferConfig(max_bytes=10240)
+    powertools_logger = Logger(
+        service=service_name(),
+        level=log_level.INFO.value,
+        stream=stdout,
+        buffer_config=buffer_config,
+    )
+
+    # WHEN configuration is copied with include_buffering=True
+    utils.copy_config_to_registered_loggers(
+        source_logger=powertools_logger,
+        include_buffering=True,
+        include={ext_logger.name},
+    )
+
+    # THEN external logger has exactly one handler and it is BufferingHandler (not StreamHandler)
+    assert len(ext_logger.handlers) == 1
+    assert isinstance(ext_logger.handlers[0], BufferingHandler)
+    assert not isinstance(ext_logger.handlers[0], logging.StreamHandler)
+
+    # AND when external logger emits a log with mocked tracer_id, it is buffered
+    with patch("aws_lambda_powertools.logging.logger.get_tracer_id", return_value="test-trace-id"):
+        ext_logger.info("Test message from external logger")
+        output = stdout.getvalue()
+        assert "Test message from external logger" not in output
+
+        # AND when buffer is flushed, the log appears
+        powertools_logger.flush_buffer()
+        output = stdout.getvalue()
+        assert "Test message from external logger" in output
+
+
+def test_copy_config_buffering_without_config_uses_normal_handlers(stdout, logger, log_level):
+    # GIVEN an external logger and Powertools logger WITHOUT buffer_config
+    ext_logger = logger()
+    powertools_logger = Logger(
+        service=service_name(),
+        level=log_level.INFO.value,
+        stream=stdout,
+    )
+
+    # WHEN configuration is copied with include_buffering=True
+    utils.copy_config_to_registered_loggers(
+        source_logger=powertools_logger,
+        include_buffering=True,
+        include={ext_logger.name},
+    )
+
+    # THEN external logger gets normal Powertools handler (no BufferingHandler)
+    assert len(ext_logger.handlers) == 1
+    assert isinstance(ext_logger.handlers[0], logging.StreamHandler)
+    assert isinstance(ext_logger.handlers[0].formatter, formatter.LambdaPowertoolsFormatter)
+    assert not isinstance(ext_logger.handlers[0], BufferingHandler)
+
+
+def test_copy_config_to_ext_loggers_default_include_buffering_false(stdout, logger, log_level):
+    # GIVEN Powertools logger with buffer_config and external logger
+    ext_logger = logger()
+    buffer_config = LoggerBufferConfig(max_bytes=10240)
+    powertools_logger = Logger(
+        service=service_name(),
+        level=log_level.INFO.value,
+        stream=stdout,
+        buffer_config=buffer_config,
+    )
+
+    # WHEN configuration is copied without include_buffering (default False)
+    utils.copy_config_to_registered_loggers(
+        source_logger=powertools_logger,
+        include={ext_logger.name},
+    )
+
+    # THEN external logger gets normal StreamHandler, not BufferingHandler
+    assert len(ext_logger.handlers) == 1
+    assert isinstance(ext_logger.handlers[0], logging.StreamHandler)
+    assert not isinstance(ext_logger.handlers[0], BufferingHandler)
diff --git a/tests/unit/logger/required_dependencies/test_logger_buffer_handler.py b/tests/unit/logger/required_dependencies/test_logger_buffer_handler.py
new file mode 100644
index 00000000000..17601762ce7
--- /dev/null
+++ b/tests/unit/logger/required_dependencies/test_logger_buffer_handler.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import io
+import json
+import logging
+
+from aws_lambda_powertools import Logger
+from aws_lambda_powertools.logging.buffer.config import LoggerBufferConfig
+from aws_lambda_powertools.logging.buffer.handler import BufferingHandler
+from aws_lambda_powertools.shared import constants
+
+
+def test_buffering_handler_init_stores_dependencies():
+    # GIVEN real buffer_config, source_logger (Logger with buffer), and its buffer_cache
+    buffer_config = LoggerBufferConfig(max_bytes=10240)
+    source_logger = Logger(service="test1", buffer_config=buffer_config, stream=io.StringIO())
+    buffer_cache = source_logger._buffer_cache
+
+    # WHEN BufferingHandler is initialized
+    handler = BufferingHandler(
+        buffer_cache=buffer_cache,
+        buffer_config=buffer_config,
+        source_logger=source_logger,
+    )
+
+    # THEN all dependencies are stored on the instance
+    assert handler.buffer_cache is buffer_cache
+    assert handler.buffer_config is buffer_config
+    assert handler.source_logger is source_logger
+    assert handler.level == logging.NOTSET
+
+
+def test_buffering_handler_emit_calls_add_log_record_to_buffer(monkeypatch):
+    # GIVEN a real Logger with buffer and a BufferingHandler (tracer id set so records are buffered)
+    monkeypatch.setenv(constants.XRAY_TRACE_ID_ENV, "1-67c39786-5908a82a246fb67f3089263f")
+    stream = io.StringIO()
+    buffer_config = LoggerBufferConfig(max_bytes=10240)
+    source_logger = Logger(service="test2", buffer_config=buffer_config, stream=stream)
+    handler = BufferingHandler(
+        buffer_cache=source_logger._buffer_cache,
+        buffer_config=source_logger._buffer_config,
+        source_logger=source_logger,
+    )
+    record = logging.LogRecord(
+        name="external",
+        level=logging.INFO,
+        pathname="",
+        lineno=0,
+        msg="test %s",
+        args=("arg",),
+        exc_info=None,
+        func=None,
+    )
+    record.stack_info = None
+
+    # WHEN the handler emits the record and the buffer is flushed
+    handler.emit(record)
+    source_logger.flush_buffer()
+
+    # THEN the buffered message appears in the logger output
+    output = stream.getvalue()
+    log_line = json.loads(output.strip())
+    assert log_line["message"] == "test arg"
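
Taken together, these changes let logs from third-party loggers share the Powertools Logger buffer. A minimal usage sketch, assuming a Lambda invocation with an active trace id (the tests above mock get_tracer_id or set the X-Ray env var); the service name and the third-party logger name "external.lib" are placeholders, and only names introduced in this diff (include_buffering, BufferingHandler, LoggerBufferConfig, flush_buffer) are relied on:

import logging

from aws_lambda_powertools import Logger
from aws_lambda_powertools.logging import utils
from aws_lambda_powertools.logging.buffer import LoggerBufferConfig

# Powertools Logger configured with a log buffer, mirroring the test setup above
logger = Logger(service="example", buffer_config=LoggerBufferConfig(max_bytes=10240))

# "external.lib" is a hypothetical logger name; it must be registered before config is copied
external_logger = logging.getLogger("external.lib")

# With include_buffering=True, each discovered logger gets a BufferingHandler instead of the
# source handlers, so its records land in the shared buffer rather than being emitted directly
utils.copy_config_to_registered_loggers(
    source_logger=logger,
    include_buffering=True,
    include={"external.lib"},
)


def handler(event, context):
    external_logger.info("queued in the shared buffer")  # buffered, not written to stdout yet
    logger.flush_buffer()  # emits buffered records, including the external logger's entry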