Version 0.4.0

Changes:
- put ObservabilityMiddleware before ExceptionHandlerMiddleware to avoid repetitive code
- add application startup and last-metrics-update metrics, along with CPU usage and thread count metrics
- move host and port to a new uvicorn section in the config, along with new reload and forwarded_allow_ips options (see the sketch below)
- add request_id and skip trace_id/span_id generation when tracing is disabled
- move logging logic from utils to observability
- pass trace_id/span_id in hex form
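
A minimal sketch of how the new uvicorn config section might be consumed. Only the field names (host, port, reload, forwarded_allow_ips) come from the changelog above; the UvicornConfig class and run_app helper are illustrative assumptions, not part of this commit:

```python
# Illustrative sketch only: UvicornConfig and run_app are assumed names,
# the four fields mirror the new uvicorn config section described above.
from dataclasses import dataclass

import uvicorn


@dataclass
class UvicornConfig:
    host: str = "0.0.0.0"
    port: int = 8000
    reload: bool = False
    forwarded_allow_ips: str | None = None


def run_app(app_path: str, cfg: UvicornConfig) -> None:
    # host/port now live next to the other uvicorn options rather than at the config root
    uvicorn.run(
        app_path,
        host=cfg.host,
        port=cfg.port,
        reload=cfg.reload,
        forwarded_allow_ips=cfg.forwarded_allow_ips,
    )
```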
2026-01-03 11:01:43 +03:00
parent b8acb017fd
commit 53f14a8624
26 changed files with 901 additions and 730 deletions


@@ -0,0 +1,165 @@
"""Observability helper functions are defined here."""
import json
import logging
import sys
from pathlib import Path
from typing import Any
import structlog
from opentelemetry import trace
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
from opentelemetry.sdk._logs import (
LoggerProvider,
LoggingHandler,
LogRecordProcessor,
ReadWriteLogRecord,
)
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.util.types import Attributes
from {{project_slug}}.observability.otel_agent import get_resource
from .config import ExporterConfig, FileLogger, LoggingConfig, LoggingLevel
def configure_logging(
config: LoggingConfig,
tracing_enabled: bool,
) -> structlog.stdlib.BoundLogger:
processors = [
structlog.contextvars.merge_contextvars,
structlog.stdlib.add_log_level,
structlog.stdlib.add_logger_name,
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
]
if tracing_enabled:
processors.insert(len(processors) - 1, _add_open_telemetry_spans)
structlog.configure(
processors=processors,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
root_logger = logging.getLogger()
root_logger.setLevel(config.root_logger_level)
if config.stderr_level is not None:
_configure_stderr_logger(root_logger, config.stderr_level)
if len(config.files) > 0:
_configure_file_loggers(root_logger, config.files)
if config.exporter is not None:
_configure_otel_exporter(root_logger, config.exporter)
logger: structlog.stdlib.BoundLogger = structlog.get_logger("{{project_name}}")
logger.setLevel(_level_name_mapping[config.root_logger_level])
return logger
_level_name_mapping: dict[LoggingLevel, int] = {
"DEBUG": logging.DEBUG,
"INFO": logging.INFO,
"WARNING": logging.WARNING,
"ERROR": logging.ERROR,
"CRITICAL": logging.CRITICAL,
}
def _configure_stderr_logger(root_logger: logging.Logger, level: LoggingLevel) -> None:
stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setFormatter(
structlog.stdlib.ProcessorFormatter(processor=structlog.dev.ConsoleRenderer(colors=True))
)
stderr_handler.setLevel(_level_name_mapping[level])
root_logger.addHandler(stderr_handler)
def _configure_file_loggers(root_logger: logging.Logger, config_files: list[FileLogger]) -> None:
files = {logger_config.filename: logger_config.level for logger_config in config_files}
for filename, level in files.items():
try:
Path(filename).parent.mkdir(parents=True, exist_ok=True)
except Exception as exc: # pylint: disable=broad-except
print(f"Cannot create directory for log file {filename}, application will crash most likely. {exc!r}")
file_handler = logging.FileHandler(filename=filename, encoding="utf-8")
file_handler.setFormatter(structlog.stdlib.ProcessorFormatter(processor=structlog.processors.JSONRenderer()))
file_handler.setLevel(_level_name_mapping[level])
root_logger.addHandler(file_handler)
def _configure_otel_exporter(root_logger: logging.Logger, config: ExporterConfig) -> None:
logger_provider = LoggerProvider(resource=get_resource())
set_logger_provider(logger_provider)
otlp_exporter = OTLPLogExporter(endpoint=config.endpoint, insecure=config.tls_insecure)
logger_provider.add_log_record_processor(OtelLogPreparationProcessor())
logger_provider.add_log_record_processor(BatchLogRecordProcessor(otlp_exporter))
exporter_handler = AttrFilteredLoggingHandler(
level=config.level,
logger_provider=logger_provider,
)
exporter_handler.setLevel(_level_name_mapping[config.level])
root_logger.addHandler(exporter_handler)
def _add_open_telemetry_spans(_, __, event_dict: dict):
span = trace.get_current_span()
if not span or not span.is_recording():
return event_dict
ctx = span.get_span_context()
event_dict["span_id"] = format(ctx.span_id, "016x")
event_dict["trace_id"] = format(ctx.trace_id, "032x")
return event_dict
class AttrFilteredLoggingHandler(LoggingHandler):
DROP_ATTRIBUTES = ["_logger"]
@staticmethod
def _get_attributes(record: logging.LogRecord) -> Attributes:
attributes = LoggingHandler._get_attributes(record)
for attr in AttrFilteredLoggingHandler.DROP_ATTRIBUTES:
if attr in attributes:
del attributes[attr]
return attributes
class OtelLogPreparationProcessor(LogRecordProcessor):
"""Processor which moves everything except message from log record body to attributes."""
def on_emit(self, log_record: ReadWriteLogRecord) -> None:
if not isinstance(log_record.log_record.body, dict):
return
for key in log_record.log_record.body:
if key == "event":
continue
save_key = key
if key in log_record.log_record.attributes:
save_key = f"{key}__body"
log_record.log_record.attributes[save_key] = self._format_value(log_record.log_record.body[key])
log_record.log_record.body = log_record.log_record.body["event"]
def _format_value(self, value: Any) -> str:
if isinstance(value, (dict, list)):
return json.dumps(value)
return str(value)
def force_flush(self, timeout_millis=30000):
pass
def shutdown(self):
pass
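
For context, a minimal usage sketch of the module above. The module path and the LoggingConfig keyword arguments are assumptions for illustration, not part of this diff:

```python
# Minimal usage sketch (assumed wiring, not part of this commit).
# The module path and the LoggingConfig constructor arguments are assumptions.
from {{project_slug}}.observability.config import LoggingConfig
from {{project_slug}}.observability.logging import configure_logging

logging_config = LoggingConfig(root_logger_level="INFO", stderr_level="DEBUG", files=[], exporter=None)
logger = configure_logging(logging_config, tracing_enabled=False)
logger.info("application started")
```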