Version 0.3.0

Changes:
- fix double exception message in main request_processing span
- add OpenSearch to Jaeger and OpenTelemetry Logs
- add optional OpenTelemetry Logs Exporter to structlog
- update deploy README
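
The rework replaces configure_logging's loose keyword arguments with a single LoggingConfig object (see the diff below). A minimal sketch of the new call, where the collector endpoint, file name and levels are illustrative rather than taken from the template:

# Sketch only: the names come from the observability module in this diff;
# the endpoint, file name and levels are made up for illustration.
config = LoggingConfig(
    level="DEBUG",
    exporter=ExporterConfig(endpoint="otel-collector:4317", tls_insecure=True),
    root_logger_level="INFO",
    files=[FileLogger(filename="app.log", level="INFO")],
)
logger = configure_logging(config, tracing_enabled=True)
logger.info("service started")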
2025-12-06 19:41:33 +03:00
parent 34c1347402
commit b8acb017fd
16 changed files with 966 additions and 450 deletions


@@ -1,20 +1,56 @@
"""Observability helper functions are defined here."""
import logging
import platform
import re
import sys
from dataclasses import dataclass, field
from pathlib import Path
from typing import Literal
import structlog
from opentelemetry import trace
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.resources import Resource
from opentelemetry.util.types import Attributes
LoggingLevel = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]


@dataclass
class ExporterConfig:
    endpoint: str
    level: LoggingLevel = "INFO"
    tls_insecure: bool = False


@dataclass
class FileLogger:
    filename: str
    level: LoggingLevel


@dataclass
class LoggingConfig:
    level: LoggingLevel
    exporter: ExporterConfig | None
    root_logger_level: LoggingLevel = "INFO"
    files: list[FileLogger] = field(default_factory=list)

    def __post_init__(self):
        # Coerce plain dicts (e.g. values parsed from a config file) into FileLogger instances.
        if len(self.files) > 0 and isinstance(self.files[0], dict):
            self.files = [FileLogger(**f) for f in self.files]


def configure_logging(
    config: LoggingConfig,
    tracing_enabled: bool,
) -> structlog.stdlib.BoundLogger:
    files = {logger_config.filename: logger_config.level for logger_config in config.files}
    level_name_mapping = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
@@ -22,25 +58,44 @@ def configure_logging(
"ERROR": logging.ERROR,
"CRITICAL": logging.CRITICAL,
}
files = files or {}
log_level = level_name_mapping[config.level]
    processors = [
        structlog.contextvars.merge_contextvars,
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ]
    if tracing_enabled:

        def add_open_telemetry_spans(_, __, event_dict: dict):
            # Enrich every event with the ids of the currently active span so that
            # log lines can be correlated with traces.
            span = trace.get_current_span()
            if not span or not span.is_recording():
                return event_dict
            ctx = span.get_span_context()
            event_dict["span_id"] = format(ctx.span_id, "016x")
            event_dict["trace_id"] = format(ctx.trace_id, "032x")
            return event_dict

        # Insert before ProcessorFormatter.wrap_for_formatter, which must stay last.
        processors.insert(len(processors) - 1, add_open_telemetry_spans)
    structlog.configure(
        processors=processors,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    logger: structlog.stdlib.BoundLogger = structlog.get_logger("main")
    logger.setLevel(log_level)

    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setFormatter(
@@ -60,7 +115,29 @@ def configure_logging(
        file_handler.setLevel(level_name_mapping[level])
        root_logger.addHandler(file_handler)

    root_logger.setLevel(config.root_logger_level)

    if config.exporter is not None:
        # Ship logs to an OTLP endpoint (e.g. an OpenTelemetry Collector) in addition to console and files.
        logger_provider = LoggerProvider(
            resource=Resource.create(
                {
                    "service.name": "{{project_name}}",
                    "service.instance.id": platform.node(),
                }
            ),
        )
        set_logger_provider(logger_provider)

        otlp_exporter = OTLPLogExporter(endpoint=config.exporter.endpoint, insecure=config.exporter.tls_insecure)
        logger_provider.add_log_record_processor(BatchLogRecordProcessor(otlp_exporter))

        exporter_handler = AttrFilteredLoggingHandler(
            level=config.exporter.level,
            logger_provider=logger_provider,
        )
        # exporter_handler.setFormatter(structlog.stdlib.ProcessorFormatter(processor=structlog.processors.JSONRenderer()))
        exporter_handler.setLevel(level_name_mapping[config.exporter.level])
        logger.addHandler(exporter_handler)

    return logger
@@ -102,3 +179,15 @@ def get_span_headers() -> dict[str, str]:
"X-Span-Id": str(ctx.span_id),
"X-Trace-Id": str(ctx.trace_id),
}


class AttrFilteredLoggingHandler(LoggingHandler):
    """LoggingHandler that drops structlog-internal record attributes that are not valid OTLP values."""

    DROP_ATTRIBUTES = ["_logger"]

    @staticmethod
    def _get_attributes(record: logging.LogRecord) -> Attributes:
        attributes = LoggingHandler._get_attributes(record)
        for attr in AttrFilteredLoggingHandler.DROP_ATTRIBUTES:
            if attr in attributes:
                del attributes[attr]
        return attributes
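
A side note on why this handler exists: structlog's ProcessorFormatter.wrap_for_formatter hands the wrapped logger object to the stdlib LogRecord via extra as _logger, and the OpenTelemetry handler copies such extras into log attributes, where a logger object is not a valid value. A small standalone sketch, illustrative only and assuming the opentelemetry-sdk version used here (where _get_attributes is the static helper the subclass overrides):

import logging

from opentelemetry.sdk._logs import LoggingHandler

# Build a record the way stdlib logging does when structlog passes extra={"_logger": ...}.
record = logging.LogRecord(
    name="demo", level=logging.INFO, pathname=__file__, lineno=1,
    msg="hello", args=(), exc_info=None,
)
record._logger = logging.getLogger("demo")  # stand-in for the object wrap_for_formatter attaches

attributes = LoggingHandler._get_attributes(record)
print("_logger" in attributes)  # expected: True, hence the filtering in AttrFilteredLoggingHandler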