feat: Implement structured JSON logging and performance tracking features

claudi 2026-01-29 10:25:54 +01:00
parent db3799a643
commit 5dc988005c
3 changed files with 370 additions and 12 deletions


@@ -1,9 +1,74 @@
"""Logging configuration and utilities for WebDrop Bridge."""
import json
import logging
import logging.handlers
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
from typing import Any, Dict, Optional
class JSONFormatter(logging.Formatter):
"""Custom JSON formatter for structured logging.
Formats log records as JSON for better parsing and analysis.
Includes timestamp, level, message, module, and optional context.
"""
def format(self, record: logging.LogRecord) -> str:
"""Format log record as JSON string.
Args:
record: LogRecord to format
Returns:
JSON string containing log data
"""
log_data: Dict[str, Any] = {
"timestamp": datetime.fromtimestamp(record.created).isoformat(),
"level": record.levelname,
"logger": record.name,
"message": record.getMessage(),
"module": record.module,
"function": record.funcName,
"line": record.lineno,
}
# Add exception info if present
if record.exc_info:
log_data["exception"] = self.formatException(record.exc_info)
# Add any extra context passed via the `extra` dict on logging calls,
# which the logging module stores as attributes on the LogRecord
for key, value in record.__dict__.items():
if key not in (
"name",
"msg",
"args",
"created",
"filename",
"funcName",
"levelname",
"levelno",
"lineno",
"module",
"msecs",
"message",
"pathname",
"process",
"processName",
"relativeCreated",
"thread",
"threadName",
"exc_info",
"exc_text",
"stack_info",
):
log_data[key] = value
return json.dumps(log_data, default=str)
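For orientation, a minimal, hedged sketch of how caller-supplied context flows through the formatter above; the logger name, import path, and extra fields are illustrative, not part of the commit:
import logging
import sys
from webdrop_bridge.logging_config import JSONFormatter  # hypothetical import path
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(JSONFormatter())
demo_logger = logging.getLogger("webdrop_bridge.demo")
demo_logger.addHandler(handler)
demo_logger.setLevel(logging.INFO)
# The `extra` dict becomes attributes on the LogRecord, so the loop in
# format() copies the fields into the JSON object next to the standard keys.
demo_logger.info("file received", extra={"file_name": "photo.jpg", "size_bytes": 1024})
# -> {"timestamp": "...", "level": "INFO", "logger": "webdrop_bridge.demo",
#     "message": "file received", ..., "file_name": "photo.jpg", "size_bytes": 1024}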
def setup_logging(
@@ -11,6 +76,7 @@ def setup_logging(
level: str = "INFO",
log_file: Optional[Path] = None,
fmt: Optional[str] = None,
json_format: bool = False,
) -> logging.Logger:
"""Configure application-wide logging.
@@ -24,6 +90,7 @@ def setup_logging(
to this file in addition to console
fmt: Optional custom format string. If None, uses default format.
Default: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
json_format: If True, use JSON format for logs. Ignores fmt parameter.
Returns:
logging.Logger: Configured logger instance
@@ -38,12 +105,14 @@ def setup_logging(
except AttributeError as e:
raise KeyError(f"Invalid logging level: {level}") from e
# Use default format if not provided
if fmt is None:
fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Create formatter
formatter = logging.Formatter(fmt)
# Create formatter based on format type
if json_format:
formatter = JSONFormatter()
else:
# Use default format if not provided
if fmt is None:
fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
formatter = logging.Formatter(fmt)
# Get or create logger
logger = logging.getLogger(name)
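A hedged usage sketch for the new json_format flag; the import path and log file location are guesses (the module's file name is not visible in this view), and the name parameter is inferred from the getLogger(name) call above:
from pathlib import Path
from webdrop_bridge.logging_config import setup_logging  # hypothetical import path
logger = setup_logging(
    name="webdrop_bridge",              # parameter name inferred from the function body
    level="DEBUG",
    log_file=Path("logs/webdrop.log"),  # hypothetical location
    json_format=True,                   # selects JSONFormatter; any fmt value is ignored
)
logger.info("bridge started")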
@@ -64,6 +133,9 @@ def setup_logging(
# Create parent directories if needed
log_file.parent.mkdir(parents=True, exist_ok=True)
# Archive old logs before creating new handler
_archive_old_logs(log_file)
# Use rotating file handler to manage log file size
# Max 10 MB per file, keep 5 backups
file_handler = logging.handlers.RotatingFileHandler(
@@ -98,3 +170,90 @@ def get_logger(name: str = __name__) -> logging.Logger:
logging.Logger: Logger instance for the given name
"""
return logging.getLogger(name)
def _archive_old_logs(log_file: Path, retention_days: int = 30) -> None:
"""Archive logs older than retention period.
Removes log files older than the specified retention period.
Called automatically by setup_logging.
Args:
log_file: Path to the current log file
retention_days: Number of days to keep old logs (default: 30)
"""
if not log_file.parent.exists():
return
now = datetime.now()
cutoff = now - timedelta(days=retention_days)
# Check for backup log files (*.log.1, *.log.2, etc.)
for log_path in log_file.parent.glob(f"{log_file.name}.*"):
try:
# Get file modification time
mtime = datetime.fromtimestamp(log_path.stat().st_mtime)
if mtime < cutoff:
log_path.unlink()
except OSError:  # IOError is an alias of OSError on Python 3
# Silently skip if we can't delete
pass
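To make the retention sweep concrete, a hedged illustration of which files it would touch, assuming a hypothetical logs/ directory; RotatingFileHandler names its backups webdrop.log.1, webdrop.log.2, ..., which is exactly what the glob above matches:
from datetime import datetime, timedelta
from pathlib import Path
log_file = Path("logs/webdrop.log")           # hypothetical path
cutoff = datetime.now() - timedelta(days=30)  # same 30-day default as above
for backup in sorted(log_file.parent.glob(f"{log_file.name}.*")):
    mtime = datetime.fromtimestamp(backup.stat().st_mtime)
    status = "delete" if mtime < cutoff else "keep"
    print(f"{backup.name}: last modified {mtime:%Y-%m-%d} -> {status}")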
class PerformanceTracker:
"""Track performance metrics for application operations.
Provides context manager interface for timing code blocks
and logging performance data.
Example:
with PerformanceTracker("drag_operation") as tracker:
# Your code here
pass
# Logs elapsed time automatically
"""
def __init__(self, operation_name: str, logger: Optional[logging.Logger] = None):
"""Initialize performance tracker.
Args:
operation_name: Name of the operation being tracked
logger: Logger instance to use (uses root logger if None)
"""
self.operation_name = operation_name
self.logger = logger or logging.getLogger("webdrop_bridge")
self.start_time: Optional[float] = None
self.elapsed_time: float = 0.0
def __enter__(self) -> "PerformanceTracker":
"""Enter context manager."""
self.start_time = time.time()
self.logger.debug(f"Starting: {self.operation_name}")
return self
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
"""Exit context manager and log elapsed time."""
if self.start_time is not None:
self.elapsed_time = time.time() - self.start_time
# Log with appropriate level based on execution
if exc_type is not None:
self.logger.warning(
f"Completed (with error): {self.operation_name}",
extra={"duration_seconds": self.elapsed_time, "error": str(exc_val)},
)
else:
self.logger.debug(
f"Completed: {self.operation_name}",
extra={"duration_seconds": self.elapsed_time},
)
def get_elapsed(self) -> float:
"""Get elapsed time in seconds.
Returns:
Elapsed time since the context was entered, or 0.0 if it has not been entered yet
"""
if self.start_time is None:
return 0.0
return time.time() - self.start_time
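Finally, a hedged end-to-end sketch combining the pieces in this commit; the import path and file names are assumptions, and time.sleep stands in for real work:
import time
from pathlib import Path
from webdrop_bridge.logging_config import setup_logging, PerformanceTracker  # hypothetical path
logger = setup_logging(name="webdrop_bridge", level="DEBUG",
                       log_file=Path("logs/webdrop.log"), json_format=True)
with PerformanceTracker("drag_operation", logger=logger) as tracker:
    time.sleep(0.1)  # stand-in for the actual drag handling
    logger.debug("still in progress", extra={"elapsed_so_far": tracker.get_elapsed()})
# On exit the tracker logs "Completed: drag_operation" at DEBUG level with a
# duration_seconds field, which JSONFormatter emits as a top-level JSON key.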