feat: Implement JSON logging with request context and centralize logging configuration.
parent
d694dafa8f
commit
abb7e8d27b
@ -0,0 +1,18 @@
|
||||
from contextvars import ContextVar
|
||||
from typing import Optional, Final
|
||||
|
||||
# Name under which the request-id context variable is registered.
REQUEST_ID_CTX_KEY: Final[str] = "request_id"

# Holds the current request's id for the active async/thread context;
# defaults to None outside of a request scope.
_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(
    REQUEST_ID_CTX_KEY, default=None)
|
||||
|
||||
|
||||
def get_request_id() -> Optional[str]:
    """Return the request id bound to the current context.

    Returns None when called outside of a request scope (the context
    variable's default).
    """
    current_id = _request_id_ctx_var.get()
    return current_id
|
||||
|
||||
|
||||
def set_request_id(request_id: str):
    """Bind *request_id* to the current context.

    Returns the contextvars Token that undoes this binding when passed
    to reset_request_id().
    """
    token = _request_id_ctx_var.set(request_id)
    return token
|
||||
|
||||
|
||||
def reset_request_id(token):
    """Restore the request-id variable to its state before the matching
    set_request_id() call.

    *token* is the contextvars Token returned by set_request_id().
    """
    _request_id_ctx_var.reset(token)
|
||||
@ -1,16 +1,129 @@
|
||||
import logging
|
||||
import json
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
from fastapi import FastAPI
|
||||
from typing import Optional
|
||||
|
||||
from src.config import LOG_LEVEL
|
||||
from src.enums import RBDEnum
|
||||
|
||||
|
||||
# Plain-text format for debug-style logging (kept for the optional
# non-JSON branch referenced in configure_logging()).
LOG_FORMAT_DEBUG = "%(levelname)s:%(message)s:%(pathname)s:%(funcName)s:%(lineno)d"

# ANSI Color Codes
RESET = "\033[0m"
# Per-level ANSI escape prefixes, applied only when stdout is a terminal.
COLORS = {
    "DEBUG": "\033[36m",  # Cyan
    "INFO": "\033[32m",  # Green
    "WARNING": "\033[33m",  # Yellow
    "WARN": "\033[33m",  # Yellow
    "ERROR": "\033[31m",  # Red
    "CRITICAL": "\033[1;31m",  # Bold Red
}
|
||||
|
||||
|
||||
class LogLevels(RBDEnum):
    """Log levels accepted via configuration.

    Values match the level names understood by the stdlib ``logging``
    module ("WARN" is logging's alias for "WARNING").
    """
    info = "INFO"
    warn = "WARN"
    error = "ERROR"
    debug = "DEBUG"
|
||||
|
||||
|
||||
class JSONFormatter(logging.Formatter):
    """Format log records as single-line JSON objects.

    Each record carries timestamp, level, message, source location, pid,
    the current request id (when resolvable) and any ``extra`` attributes
    passed to the logging call. Output is wrapped in ANSI color codes
    when stdout is a terminal.
    """

    # LogRecord attributes that are either emitted explicitly or are
    # internal bookkeeping; anything else on the record is treated as a
    # user-supplied "extra" and copied into the JSON payload.
    # "taskName" exists on records since Python 3.12 and must be skipped
    # too, or every record leaks a spurious key on newer interpreters.
    _STANDARD_ATTRS = frozenset({
        "args", "asctime", "created", "exc_info", "exc_text", "filename",
        "funcName", "levelname", "levelno", "lineno", "module", "msecs",
        "message", "msg", "name", "pathname", "process", "processName",
        "relativeCreated", "stack_info", "taskName", "thread", "threadName",
    })

    def format(self, record):
        """Return *record* rendered as a JSON string (colorized on a tty)."""
        # Resolve the request id defensively. The import itself lives
        # inside the try: if the context module is unavailable (early
        # startup, isolated tests), logging must still work.
        request_id = None
        try:
            from src.context import get_request_id
            request_id = get_request_id()
        except Exception:
            pass

        log_record = {
            # Local-time ISO-8601 timestamp including the UTC offset.
            "timestamp": datetime.datetime.fromtimestamp(record.created).astimezone().isoformat(),
            "level": record.levelname,
            "message": record.getMessage(),
            "logger_name": record.name,
            "location": f"{record.module}:{record.funcName}:{record.lineno}",
            "module": record.module,
            "funcName": record.funcName,
            "lineno": record.lineno,
            "pid": os.getpid(),
            "request_id": request_id,
        }

        # Capture exception info if available
        if record.exc_info:
            log_record["exception"] = self.formatException(record.exc_info)

        # Capture stack info if available
        if record.stack_info:
            log_record["stack_trace"] = self.formatStack(record.stack_info)

        # Add any extra attributes passed to the log call; standard
        # LogRecord attributes are skipped to avoid duplication.
        for key, value in record.__dict__.items():
            if key not in self._STANDARD_ATTRS:
                log_record[key] = value

        # default=str keeps the formatter from raising when an "extra"
        # value is not JSON-serializable; such values are stringified.
        log_json = json.dumps(log_record, default=str)

        # Apply color if the output is a terminal
        if sys.stdout.isatty():
            level_color = COLORS.get(record.levelname, "")
            return f"{level_color}{log_json}{RESET}"

        return log_json
|
||||
|
||||
|
||||
def configure_logging():
    """Configure the root logger for JSON output on stdout.

    Reads LOG_LEVEL from configuration and validates it against the
    LogLevels enum (falling back to ERROR when unrecognized), installs a
    single JSONFormatter stream handler on the root logger, and routes
    the uvicorn/fastapi loggers through it.
    """
    log_level = str(LOG_LEVEL).upper()  # config value may not be a str

    # Compare against the enum's *string values*, not its members: the
    # original membership test against list(LogLevels) (and passing an
    # enum member to setLevel below) only works when RBDEnum mixes in
    # str. Using .value is correct regardless of the enum's base.
    valid_levels = {level.value for level in LogLevels}
    if log_level not in valid_levels:
        # Unknown level: fail safe to ERROR instead of raising at startup.
        log_level = LogLevels.error.value

    # Get the root logger; setLevel accepts the level name as a string
    # ("WARN" is a stdlib alias for "WARNING").
    root_logger = logging.getLogger()
    root_logger.setLevel(log_level)

    # Clear existing handlers to avoid duplicate log lines when this is
    # called more than once (e.g. on app reloads).
    if root_logger.hasHandlers():
        root_logger.handlers.clear()

    # Single stream handler writing JSON to stdout for all environments.
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(JSONFormatter())
    root_logger.addHandler(handler)

    # Reconfigure uvicorn/fastapi loggers to drop their own handlers and
    # propagate to the root logger so everything shares the JSON format.
    for logger_name in ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]:
        logger = logging.getLogger(logger_name)
        logger.handlers = []
        logger.propagate = True
|
||||
|
||||
def setup_logging(logger):
    """Attach a plain-text stdout handler to *logger* at DEBUG level.

    Also raises the noisy ``slack_sdk.web.base_client`` logger to
    CRITICAL so routine Slack client chatter is suppressed.
    """
    logger.setLevel(logging.DEBUG)

    # The Slack client can be too verbose; keep only critical messages.
    logging.getLogger("slack_sdk.web.base_client").setLevel(logging.CRITICAL)

    # Human-readable console output on stdout.
    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(
        logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    )
    logger.addHandler(console)
|
||||
|
||||
Loading…
Reference in New Issue