Structured logging for Python that emphasizes simplicity, power, and performance
Complete integration with Python's standard logging module, including stdlib-compatible loggers, formatters, and processors for bridging structlog with existing logging infrastructure. This integration allows structlog to work seamlessly with existing Python logging configurations.
BoundLogger implementation that integrates with Python's standard logging module, providing both structlog's structured logging capabilities and compatibility with stdlib logging.
class BoundLogger(BoundLoggerBase):
    """
    Standard library compatible bound logger.

    Provides both structlog context methods and standard logging methods,
    making it compatible with existing stdlib logging code.
    """

    # Standard sync logging methods (one per stdlib level).
    def debug(self, event=None, **kw) -> None: ...
    def info(self, event=None, **kw) -> None: ...
    def warning(self, event=None, **kw) -> None: ...
    def warn(self, event=None, **kw) -> None: ...  # Deprecated alias for warning()
    def error(self, event=None, **kw) -> None: ...
    def critical(self, event=None, **kw) -> None: ...
    def fatal(self, event=None, **kw) -> None: ...  # Alias for critical()
    def exception(self, event=None, **kw) -> None: ...  # error() with exc_info attached
    def log(self, level, event=None, **kw) -> None: ...

    # Async logging methods.  Declared ``async def`` (coroutines) so callers
    # can ``await`` them — the examples in this document do exactly that
    # (``await logger.ainfo(...)``); the original plain ``def`` stubs were
    # inconsistent with that usage.
    async def adebug(self, event=None, **kw) -> None: ...
    async def ainfo(self, event=None, **kw) -> None: ...
    async def awarning(self, event=None, **kw) -> None: ...
    async def awarn(self, event=None, **kw) -> None: ...  # Deprecated alias for awarning()
    async def aerror(self, event=None, **kw) -> None: ...
    async def acritical(self, event=None, **kw) -> None: ...
    async def afatal(self, event=None, **kw) -> None: ...  # Alias for acritical()
    async def aexception(self, event=None, **kw) -> None: ...
    async def alog(self, level, event=None, **kw) -> None: ...

    # Standard logging properties — presumably proxied to the wrapped
    # stdlib logger (TODO confirm against the wrapped-logger implementation).
    @property
    def name(self) -> str: ...
    @property
    def level(self) -> int: ...
    @property
    def parent(self) -> logging.Logger: ...
    @property
    def propagate(self) -> bool: ...
    @property
    def handlers(self) -> list[logging.Handler]: ...
    @property
    def disabled(self) -> bool: ...

    # Standard logging methods mirroring logging.Logger's API.
    def setLevel(self, level) -> None: ...
    def findCaller(self, stack_info=False, stacklevel=1) -> tuple: ...
    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None) -> logging.LogRecord: ...
    def handle(self, record) -> None: ...
    def addHandler(self, hdlr) -> None: ...
    def removeHandler(self, hdlr) -> None: ...
    def hasHandlers(self) -> bool: ...
    def callHandlers(self, record) -> None: ...
    def getEffectiveLevel(self) -> int: ...
    def isEnabledFor(self, level) -> bool: ...
    def getChild(self, suffix) -> logging.Logger: ...

Async wrapper around BoundLogger that exposes logging methods as async versions, allowing non-blocking logging in async applications.
class AsyncBoundLogger:
    """
    Wraps a BoundLogger & exposes its logging methods as async versions.

    Instead of blocking the program, logging operations are run asynchronously
    in a thread pool executor. This means more computational overhead per log
    call but prevents processor chains and I/O from blocking async applications.

    .. versionadded:: 20.2.0
    .. deprecated:: 23.1.0 Use async methods on BoundLogger instead
    """

    # NOTE(review): the underscore-prefixed parameters look internal-only —
    # presumably used when cloning via bind()/unbind(); confirm before passing.
    def __init__(
        self,
        logger,
        processors=None,
        context=None,
        _sync_bl=None,
        _loop=None
    ): ...

    # Context management methods — each returns a NEW AsyncBoundLogger.
    # (Return annotations are quoted: the class name is not yet bound while
    # the class body executes, so unquoted self-references would fail.)
    def bind(self, **new_values) -> "AsyncBoundLogger": ...
    def unbind(self, *keys) -> "AsyncBoundLogger": ...
    def try_unbind(self, *keys) -> "AsyncBoundLogger": ...
    def new(self, **new_values) -> "AsyncBoundLogger": ...

    # Async logging methods — each defers to the wrapped sync method,
    # run in a separate thread per the docstrings below.
    async def adebug(self, event: str, *args, **kw) -> None:
        """Log using debug(), but asynchronously in a separate thread."""
    async def ainfo(self, event: str, *args, **kw) -> None:
        """Log using info(), but asynchronously in a separate thread."""
    async def awarning(self, event: str, *args, **kw) -> None:
        """Log using warning(), but asynchronously in a separate thread."""
    async def aerror(self, event: str, *args, **kw) -> None:
        """Log using error(), but asynchronously in a separate thread."""
    async def acritical(self, event: str, *args, **kw) -> None:
        """Log using critical(), but asynchronously in a separate thread."""
    async def afatal(self, event: str, *args, **kw) -> None:
        """Log using critical(), but asynchronously in a separate thread."""
    async def aexception(self, event: str, *args, **kw) -> None:
        """Log using exception(), but asynchronously in a separate thread."""
    async def alog(self, level, event: str, *args, **kw) -> None:
"""Log using log(), but asynchronously in a separate thread."""Factory for creating standard library logger instances that work with structlog.
class LoggerFactory:
    """Factory for creating stdlib logger instances."""

    def __call__(self, *args) -> logging.Logger:
        """
        Create a stdlib logger instance.

        Positional arguments are forwarded verbatim to ``logging.getLogger()``
        — typically a single dotted logger name, or nothing for the root logger.

        Args:
            *args: Arguments passed to logging.getLogger()

        Returns:
            logging.Logger: Standard library logger instance
"""ProcessorFormatter allows structlog processors to be used within the standard logging framework.
class ProcessorFormatter(logging.Formatter):
    """
    Logging formatter that processes log records through structlog processors.

    Bridges stdlib logging and structlog by converting LogRecord objects
    to structlog event dictionaries and processing them through processors.
    """

    def __init__(
        self,
        processor,
        foreign_pre_chain=None,
        keep_exc_info=False,
        keep_stack_info=False
    ):
        """
        Args:
            processor (callable): Final processor to render the log record
            foreign_pre_chain (list, optional): Processors to run before the
                main processor on "foreign" records, i.e. entries that did not
                originate from structlog
            keep_exc_info (bool): Keep exc_info in record after processing
            keep_stack_info (bool): Keep stack_info in record after processing
        """
    def format(self, record) -> str: ...

Processors for handling stdlib-style logging arguments and record attributes.
class PositionalArgumentsFormatter:
    """Handle stdlib-style positional arguments in log messages."""

    def __call__(self, logger, name, event_dict) -> EventDict:
        """
        Process positional arguments like stdlib logging.

        Handles cases where event_dict contains positional args
        that need to be formatted into the message string
        (stdlib-style ``%``-interpolation).
        """
class ExtraAdder:
    """Add logging record's extra fields to structlog event dictionary."""

    def __init__(self, names=None):
        """
        Args:
            names (set, optional): Set of field names to extract from
                LogRecord.extra.  Defaults to ``None``, which — matching
                structlog's own ``ExtraAdder`` API — copies ALL extra
                fields, so the processor is usable with no arguments.
        """
    def __call__(self, logger, name, event_dict) -> EventDict: ...

Functions for working with standard library logging integration.
def recreate_defaults(*, log_level=logging.NOTSET) -> None:
    """
    Recreate structlog defaults using stdlib logging.

    Sets up structlog to work with Python's standard logging module
    by configuring appropriate defaults.

    Args:
        log_level: Minimum log level for filtering.  ``logging.NOTSET``
            defers level decisions to the stdlib logger hierarchy
            (stdlib semantics: NOTSET means "inherit from parent").
    """
def get_logger(name=None) -> BoundLogger:
    """
    Get a stdlib-based structlog logger.

    Args:
        name (str, optional): Logger name for stdlib logger hierarchy
            (dotted path such as ``"myapp.auth"``; ``None`` for the
            default logger)

    Returns:
        BoundLogger: Stdlib-compatible structlog logger
"""Processors specifically designed for stdlib logging integration.
def add_log_level(logger, method_name, event_dict) -> EventDict:
    """
    Add log level name to event dictionary.

    Adds 'level' key with the log level name (debug, info, warning, etc.)
    based on the method name used to call the logger.  The deprecated
    ``warn`` method name is reported as ``warning``.
    """
def add_log_level_number(logger, method_name, event_dict) -> EventDict:
    """
    Add numeric log level to event dictionary.

    Adds the 'level_number' key with the numeric log level (10=DEBUG,
    20=INFO, etc.) based on the method name used to call the logger.
    (structlog stores the number under ``level_number`` — not ``level``,
    which is reserved for the level *name* added by ``add_log_level``.)
    """
def add_logger_name(logger, method_name, event_dict) -> EventDict:
    """
    Add logger name to event dictionary.

    Adds 'logger' key with the name of the stdlib logger instance
    (i.e. the wrapped ``logging.Logger``'s ``name`` attribute).
    """
def filter_by_level(logger, method_name, event_dict) -> EventDict:
    """
    Filter log events by log level.

    Drops events that don't meet the minimum log level threshold
    by raising DropEvent exception.  The threshold comes from the wrapped
    stdlib logger's effective level, so standard ``logging`` configuration
    (e.g. ``logging.basicConfig(level=...)``) controls what passes.
    """
def render_to_log_kwargs(logger, method_name, event_dict) -> dict[str, Any]:
    """
    Render event dict to stdlib logging keyword arguments.

    Converts structlog event dictionary to kwargs suitable for
    passing to stdlib logging methods: the ``event`` becomes the message
    and remaining key/value pairs travel as ``extra`` data on the record.
    """
def render_to_log_args_and_kwargs(logger, method_name, event_dict) -> tuple[tuple[Any, ...], dict[str, Any]]:
    """
    Render event dict to stdlib logging args and kwargs.

    Converts structlog event dictionary to positional args and kwargs
    suitable for stdlib logging methods.  Unlike ``render_to_log_kwargs``,
    positional arguments are passed through so stdlib ``%``-style message
    interpolation still works.
"""import logging
# Example: basic stdlib integration — structlog renders JSON,
# the standard logging module handles output.
import structlog
from structlog import stdlib

# Configure Python logging
logging.basicConfig(
    level=logging.INFO,
    format="%(message)s",
)

# Configure structlog to use stdlib
structlog.configure(
    processors=[
        stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.JSONRenderer()
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),  # emit via logging.getLogger()
    context_class=dict,
    cache_logger_on_first_use=True,  # build the wrapped logger only once
)

# Get a logger
logger = structlog.get_logger("myapp")

# Use both structlog and stdlib features
logger.info("Application started", version="1.0.0")
logger.bind(user_id=123).warning("User action", action="delete")

import logging
# Example: route structlog events through a stdlib handler whose
# ProcessorFormatter does the final rendering.
import structlog
from structlog import stdlib, processors

# Set up a stdlib handler with ProcessorFormatter
handler = logging.StreamHandler()
handler.setFormatter(
    stdlib.ProcessorFormatter(
        processor=processors.JSONRenderer(),
        # Pre-process entries coming from plain stdlib loggers:
        foreign_pre_chain=[
            processors.TimeStamper(),
            stdlib.add_log_level,
        ],
    )
)

# Configure stdlib logging
root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.INFO)

# Configure structlog to forward to stdlib
structlog.configure(
    processors=[
        stdlib.render_to_log_kwargs,  # hand the event dict to stdlib as kwargs
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),
    cache_logger_on_first_use=True,
)

logger = structlog.get_logger("myapp")
logger.info("Processed through stdlib", count=42)import logging
# Example: dual output — JSON to a file, colored console output,
# both driven by stdlib handlers with ProcessorFormatter.
import structlog
from structlog import stdlib, processors

# Configure stdlib for file output
file_handler = logging.FileHandler("app.log")
file_handler.setFormatter(
    stdlib.ProcessorFormatter(
        processor=processors.JSONRenderer(),
        foreign_pre_chain=[
            processors.TimeStamper(fmt="iso"),
            stdlib.add_log_level,
            stdlib.add_logger_name,
        ]
    )
)

# Configure stdlib for console output
console_handler = logging.StreamHandler()
console_handler.setFormatter(
    stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer(colors=True)
    )
)

# Set up root logger with BOTH handlers; each formats independently
logging.basicConfig(
    level=logging.INFO,
    handlers=[file_handler, console_handler]
)

# Configure structlog
structlog.configure(
    processors=[
        stdlib.render_to_log_kwargs,
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),
    cache_logger_on_first_use=True,
)

logger = structlog.get_logger("myapp.service")
logger.info("Service started", port=8080)
# Logs to both file (JSON) and console (colored)

import logging
# Example: level filtering — filter_by_level consults the stdlib
# logger's effective level set via basicConfig below.
import structlog
from structlog import stdlib, processors

# Configure with level filtering
structlog.configure(
    processors=[
        stdlib.filter_by_level,  # Filter by level first, before any work
        processors.TimeStamper(),
        stdlib.add_log_level,
        processors.JSONRenderer()
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),
    cache_logger_on_first_use=True,
)

# Set logging level (checked by filter_by_level at call time)
logging.basicConfig(level=logging.WARNING)

logger = structlog.get_logger()
logger.debug("Debug message")      # Filtered out
logger.info("Info message")        # Filtered out
logger.warning("Warning message")  # Shown
logger.error("Error message")      # Shown

import logging
# Example: layer structlog on top of an existing stdlib setup
# without disturbing its handlers or formatters.
import structlog
from structlog import stdlib

# Existing stdlib logging configuration
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

# Add structlog on top
stdlib.recreate_defaults(log_level=logging.INFO)

# Get logger that works with existing setup
logger = stdlib.get_logger("legacy.module")

# Works with existing handlers and formatters
logger.info("Legacy integration working", module="auth")import structlog
# Example: named loggers — add_logger_name records each logger's
# dotted name in the rendered output.
from structlog import stdlib

structlog.configure(
    processors=[
        stdlib.add_logger_name,
        stdlib.add_log_level,
        structlog.processors.JSONRenderer()
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),
)

# Create loggers with specific names
auth_logger = structlog.get_logger("myapp.auth")
db_logger = structlog.get_logger("myapp.database")
api_logger = structlog.get_logger("myapp.api")

auth_logger.info("User authenticated")
db_logger.info("Query executed", table="users")
api_logger.info("Request processed", endpoint="/users")
# Each includes the logger name in output

import asyncio
# Example: async logging — the a*-prefixed BoundLogger methods are
# coroutines and must be awaited.
import structlog
from structlog import stdlib

structlog.configure(
    processors=[
        stdlib.add_log_level,
        structlog.processors.JSONRenderer()
    ],
    wrapper_class=stdlib.BoundLogger,
    logger_factory=stdlib.LoggerFactory(),
)

async def async_operation():
    logger = structlog.get_logger()
    # Use async logging methods
    await logger.ainfo("Async operation started")
    # Simulate async work
    await asyncio.sleep(0.1)
    await logger.ainfo("Async operation completed", duration="0.1s")

# Run async operation
asyncio.run(async_operation())

Install with Tessl CLI:

    npx tessl i tessl/pypi-structlog