Coverage for packages/core/src/langgate/core/logging.py: 69%
64 statements
1"""Shared logging utilities for LangGate."""
3import logging
4import os
5from collections.abc import Callable, Iterator, Mapping, MutableMapping
6from contextlib import contextmanager
7from logging import StreamHandler
8from typing import Any, TextIO, cast
10import structlog
13class StructLogger(structlog.stdlib.BoundLogger):
14 """Custom logger class for structured logging."""
17# Default to info if not specified
18log_level_str = os.getenv("LOG_LEVEL", "info").lower()
19LOG_LEVELS = {
20 "critical": logging.CRITICAL,
21 "error": logging.ERROR,
22 "warning": logging.WARNING,
23 "info": logging.INFO,
24 "debug": logging.DEBUG,
25}
26log_level = LOG_LEVELS.get(log_level_str, logging.INFO)
28ProcessorType = Callable[[Any, str, MutableMapping[str, Any]], Mapping[str, Any]]


class MessageIsNormal(logging.Filter):
    """Filter that passes only records below ERROR level."""

    def filter(self, record):
        return record.levelno < logging.ERROR


# logs DEBUG, INFO, and WARNING to stdout
std_out = logging.StreamHandler()
std_out.setLevel(logging.DEBUG)
std_out.addFilter(MessageIsNormal())

# logs ERROR and CRITICAL to stderr
std_err = logging.StreamHandler()
std_err.setLevel(logging.ERROR)

handlers = [std_out, std_err]

# Modules to suppress DEBUG logs for
info_level_modules = ["httpx"]


def configure_logger(
    json_logs: bool = False, handlers: list[StreamHandler[TextIO]] = handlers
):
    """Configure structlog for the application.

    Args:
        json_logs: Whether to output logs in JSON format (useful for production)
        handlers: List of log handlers to use
    """
    timestamper = structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S")

    logger = structlog.stdlib.get_logger()
    logger.info(
        "logging_configured",
        log_level=log_level_str.upper(),
        json_logs=json_logs,
        py_level=log_level,
    )

    shared_processors: list[ProcessorType] = [
        timestamper,
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.contextvars.merge_contextvars,
        structlog.processors.CallsiteParameterAdder(
            {
                structlog.processors.CallsiteParameter.PATHNAME,
                structlog.processors.CallsiteParameter.FILENAME,
                structlog.processors.CallsiteParameter.MODULE,
                structlog.processors.CallsiteParameter.FUNC_NAME,
                structlog.processors.CallsiteParameter.THREAD,
                structlog.processors.CallsiteParameter.THREAD_NAME,
                structlog.processors.CallsiteParameter.PROCESS,
                structlog.processors.CallsiteParameter.PROCESS_NAME,
            }
        ),
        structlog.stdlib.ExtraAdder(),
    ]

    structlog.configure(
        processors=shared_processors
        + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=StructLogger,
        cache_logger_on_first_use=True,
    )

    logs_render = (
        structlog.processors.JSONRenderer()
        if json_logs
        else structlog.dev.ConsoleRenderer(colors=True)
    )

    _configure_default_logging_by_custom(shared_processors, logs_render, handlers)


def _configure_default_logging_by_custom(
    shared_processors, logs_render, handlers: list[StreamHandler[TextIO]]
):
    """Configure default logging with custom settings."""
    logging.basicConfig(format="%(message)s", level=log_level, handlers=handlers)

    if log_level == logging.DEBUG:
        # suppress DEBUG logs for some modules if log level is DEBUG
        for module in info_level_modules:
            logging.getLogger(module).setLevel(logging.INFO)

    # Use `ProcessorFormatter` to format all `logging` entries.
    formatter = structlog.stdlib.ProcessorFormatter(
        foreign_pre_chain=shared_processors,
        processors=[
            _extract_from_record,
            structlog.stdlib.ProcessorFormatter.remove_processors_meta,
            logs_render,
        ],
    )

    for handler in handlers:
        handler.setFormatter(formatter)


def _extract_from_record(_, __, event_dict):
    """Extract thread and process names from record."""
    record = event_dict["_record"]
    event_dict["thread_name"] = record.threadName
    event_dict["process_name"] = record.processName
    return event_dict


@contextmanager
def structlog_contextvars_context(
    context_to_keep: tuple[str, ...] | str = "",
) -> Iterator[dict[str, Any]]:
    """Set fresh context for the duration of a context scope."""
    context = structlog.contextvars.get_contextvars().copy()

    relevant_context = (
        {k: v for k, v in context.items() if k in context_to_keep}
        if context_to_keep
        else {}
    )
    structlog.contextvars.clear_contextvars()
    try:
        yield relevant_context
    finally:
        structlog.contextvars.clear_contextvars()
        structlog.contextvars.bind_contextvars(**context)


@contextmanager
def structlog_add_context(context: dict[str, Any]):
    """Add context to structlog for the duration of a context scope."""
    old_context = structlog.contextvars.get_contextvars().copy()
    try:
        structlog.contextvars.bind_contextvars(**context)
        yield
    finally:
        structlog.contextvars.clear_contextvars()
        structlog.contextvars.bind_contextvars(**old_context)


def is_debug() -> bool:
    """Check if log level is set to DEBUG."""
    return log_level == logging.DEBUG


def get_logger(name: str) -> StructLogger:
    """Get a configured logger instance.

    Args:
        name: The name of the logger, typically __name__

    Returns:
        A structured logger instance
    """
    return cast(StructLogger, structlog.stdlib.get_logger(name))
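
Usage sketch (not part of the covered module): a minimal example of how an application
might wire up this module, assuming the package is importable as langgate.core.logging.
The event name and the request_id context key below are illustrative, not taken from
the source.

    from langgate.core.logging import (
        configure_logger,
        get_logger,
        structlog_add_context,
    )

    configure_logger(json_logs=False)
    logger = get_logger(__name__)

    # Bind request-scoped context for the duration of the block (hypothetical key).
    with structlog_add_context({"request_id": "abc123"}):
        logger.info("request_received", path="/v1/models")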