Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ dependencies = [
"gunicorn>=23.0.0",
"ruff>=0.11.13",
"mypy>=1.16.0",
"structlog>=25.1.0,<=25.5.0",
"rich>=14.2.0",
]

[project.optional-dependencies]
Expand Down
28 changes: 28 additions & 0 deletions src/app/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,32 @@ class CryptSettings(BaseSettings):
REFRESH_TOKEN_EXPIRE_DAYS: int = 7


class FileLoggerSettings(BaseSettings):
    """Environment-driven settings for the rotating file log handler."""

    # Rotation policy: roll over after 10 MiB, keep 5 rotated backups.
    FILE_LOG_MAX_BYTES: int = 10 * 1024 * 1024
    FILE_LOG_BACKUP_COUNT: int = 5
    # Render file records as JSON (True) or human-readable console style (False).
    FILE_LOG_FORMAT_JSON: bool = True
    # Stdlib logging level name for the file handler (e.g. "DEBUG", "INFO").
    FILE_LOG_LEVEL: str = "INFO"

    # Include request ID, path, method, client host, and status code in the file log
    FILE_LOG_INCLUDE_REQUEST_ID: bool = True
    FILE_LOG_INCLUDE_PATH: bool = True
    FILE_LOG_INCLUDE_METHOD: bool = True
    FILE_LOG_INCLUDE_CLIENT_HOST: bool = True
    FILE_LOG_INCLUDE_STATUS_CODE: bool = True


class ConsoleLoggerSettings(BaseSettings):
    """Environment-driven settings for the console (stderr) log handler."""

    # Stdlib logging level name for the console handler (e.g. "DEBUG", "INFO").
    CONSOLE_LOG_LEVEL: str = "INFO"
    # Render console records as JSON (True) or human-readable style (False).
    CONSOLE_LOG_FORMAT_JSON: bool = False

    # Include request ID, path, method, client host, and status code in the console log
    CONSOLE_LOG_INCLUDE_REQUEST_ID: bool = False
    CONSOLE_LOG_INCLUDE_PATH: bool = False
    CONSOLE_LOG_INCLUDE_METHOD: bool = False
    CONSOLE_LOG_INCLUDE_CLIENT_HOST: bool = False
    CONSOLE_LOG_INCLUDE_STATUS_CODE: bool = False


class DatabaseSettings(BaseSettings):
    """Empty base for database settings; concrete fields are presumably
    declared on subclasses elsewhere in this module — confirm before use."""

    pass

Expand Down Expand Up @@ -164,6 +190,8 @@ class Settings(
CRUDAdminSettings,
EnvironmentSettings,
CORSSettings,
FileLoggerSettings,
ConsoleLoggerSettings,
):
model_config = SettingsConfigDict(
env_file=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", ".env"),
Expand Down
132 changes: 122 additions & 10 deletions src/app/core/logger.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,132 @@
"""Logging configuration for the application."""

import logging
import os
from logging.handlers import RotatingFileHandler

import structlog
from structlog.dev import ConsoleRenderer
from structlog.processors import JSONRenderer
from structlog.types import EventDict, Processor

from ..core.config import settings


def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict:
    """Strip uvicorn's ``color_message`` key, which duplicates ``event``
    and would otherwise be rendered twice in every log line."""
    if "color_message" in event_dict:
        del event_dict["color_message"]
    return event_dict


def file_log_filter_processors(_, __, event_dict: EventDict) -> EventDict:
    """Remove request-context keys from *event_dict* whenever the matching
    ``FILE_LOG_INCLUDE_*`` setting is disabled."""
    flag_to_key = {
        "FILE_LOG_INCLUDE_REQUEST_ID": "request_id",
        "FILE_LOG_INCLUDE_PATH": "path",
        "FILE_LOG_INCLUDE_METHOD": "method",
        "FILE_LOG_INCLUDE_CLIENT_HOST": "client_host",
        "FILE_LOG_INCLUDE_STATUS_CODE": "status_code",
    }
    for flag, key in flag_to_key.items():
        if not getattr(settings, flag):
            event_dict.pop(key, None)
    return event_dict


def console_log_filter_processors(_, __, event_dict: EventDict) -> EventDict:
    """Remove request-context keys from *event_dict* whenever the matching
    ``CONSOLE_LOG_INCLUDE_*`` setting is disabled."""
    flag_to_key = {
        "CONSOLE_LOG_INCLUDE_REQUEST_ID": "request_id",
        "CONSOLE_LOG_INCLUDE_PATH": "path",
        "CONSOLE_LOG_INCLUDE_METHOD": "method",
        "CONSOLE_LOG_INCLUDE_CLIENT_HOST": "client_host",
        "CONSOLE_LOG_INCLUDE_STATUS_CODE": "status_code",
    }
    for flag, key in flag_to_key.items():
        if not getattr(settings, flag):
            event_dict.pop(key, None)
    return event_dict


# Shared processor chain applied to every record, structlog- and stdlib-born
# alike: merge contextvars (request/job IDs bound by the middleware and worker
# hooks), add logger name and level, format %-style positional args, pull
# `extra={...}` into the event, drop uvicorn's duplicate `color_message`,
# stamp an ISO-8601 timestamp, and render stack info when requested.
timestamper = structlog.processors.TimeStamper(fmt="iso")
SHARED_PROCESSORS: list[Processor] = [
    structlog.contextvars.merge_contextvars,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.stdlib.ExtraAdder(),
    drop_color_message_key,
    timestamper,
    structlog.processors.StackInfoRenderer(),
]


# Configure structlog globally: run the shared chain, then hand each event off
# to the stdlib ProcessorFormatter so the handlers below do the final render.
structlog.configure(
    processors=SHARED_PROCESSORS + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
    logger_factory=structlog.stdlib.LoggerFactory(),
    cache_logger_on_first_use=True,
)


def build_formatter(*, json_output: bool, pre_chain: list[Processor]) -> structlog.stdlib.ProcessorFormatter:
    """Create a ``ProcessorFormatter`` for a stdlib handler.

    Parameters
    ----------
    json_output:
        When True, render records with ``JSONRenderer`` and append
        ``format_exc_info`` to *pre_chain* so exceptions become a string
        field; ``ConsoleRenderer`` is used otherwise.
    pre_chain:
        Processors applied to records that did not originate in structlog.
    """
    if json_output:
        renderer = JSONRenderer()
        chain = pre_chain + [structlog.processors.format_exc_info]
    else:
        renderer = ConsoleRenderer()
        chain = pre_chain
    return structlog.stdlib.ProcessorFormatter(
        foreign_pre_chain=chain,
        processors=[structlog.stdlib.ProcessorFormatter.remove_processors_meta, renderer],
    )


# Setup log directory next to the app package. exist_ok=True makes the call
# idempotent and race-free, so the separate existence check is unnecessary.
LOG_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs")
os.makedirs(LOG_DIR, exist_ok=True)


# File handler: rotating log file rendered through the structlog formatter.
# LOG_FILE_PATH was previously defined but unused while the same join was
# duplicated inline — use the constant as the single source of truth.
LOG_FILE_PATH = os.path.join(LOG_DIR, "app.log")
file_handler = RotatingFileHandler(
    filename=LOG_FILE_PATH,
    maxBytes=settings.FILE_LOG_MAX_BYTES,
    backupCount=settings.FILE_LOG_BACKUP_COUNT,
)
file_handler.setLevel(settings.FILE_LOG_LEVEL)
file_handler.setFormatter(
    build_formatter(
        json_output=settings.FILE_LOG_FORMAT_JSON, pre_chain=SHARED_PROCESSORS + [file_log_filter_processors]
    )
)

# Console handler configuration.
# NOTE: the legacy setup that lived here (LOGGING_LEVEL/LOGGING_FORMAT,
# logging.basicConfig, and a second RotatingFileHandler assignment) has been
# removed: basicConfig installed an extra root handler, and the reassignment
# clobbered file_handler's structlog formatter with a plain logging.Formatter.
console_handler = logging.StreamHandler()
console_handler.setLevel(settings.CONSOLE_LOG_LEVEL)
console_handler.setFormatter(
    build_formatter(
        json_output=settings.CONSOLE_LOG_FORMAT_JSON, pre_chain=SHARED_PROCESSORS + [console_log_filter_processors]
    )
)
# Root logger configuration: everything funnels through the two handlers.
# The extra `logging.getLogger("").addHandler(file_handler)` that used to
# follow has been removed — it attached the file handler a second time, so
# every record was written to the log file twice.
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
root_logger.handlers.clear()  # avoid duplicate logs from pre-existing handlers
root_logger.addHandler(file_handler)
root_logger.addHandler(console_handler)

# Uvicorn logger integration: strip uvicorn's own handlers and let its records
# propagate to the root so they share the formatting and filtering above.
for logger_name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
    logger = logging.getLogger(logger_name)
    logger.handlers.clear()
    logger.propagate = True
    logger.setLevel(logging.INFO)
3 changes: 2 additions & 1 deletion src/app/core/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from ..api.dependencies import get_current_superuser
from ..core.utils.rate_limit import rate_limiter
from ..middleware.client_cache_middleware import ClientCacheMiddleware
from ..middleware.logger_middleware import LoggerMiddleware
from ..models import * # noqa: F403
from .config import (
AppSettings,
Expand Down Expand Up @@ -219,7 +220,7 @@ def create_application(
allow_methods=settings.CORS_METHODS,
allow_headers=settings.CORS_HEADERS,
)

application.add_middleware(LoggerMiddleware)
if isinstance(settings, EnvironmentSettings):
if settings.ENVIRONMENT != EnvironmentOption.PRODUCTION:
docs_router = APIRouter()
Expand Down
13 changes: 11 additions & 2 deletions src/app/core/worker/functions.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
import asyncio
import logging

import structlog
import uvloop
from arq.worker import Worker

asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")


# -------- background tasks --------
async def sample_background_task(ctx: Worker, name: str) -> str:
Expand All @@ -22,3 +21,13 @@ async def startup(ctx: Worker) -> None:

async def shutdown(ctx: Worker) -> None:
    """arq worker shutdown hook: log that the worker is stopping.

    ``ctx`` is the worker context supplied by arq (unused here).
    """
    logging.info("Worker end")


async def on_job_start(ctx: Worker) -> None:
    """arq per-job hook: bind the job ID into structlog contextvars so every
    log line emitted while the job runs carries it.

    NOTE(review): ``ctx`` is subscripted like a mapping (``ctx["job_id"]``),
    which suggests arq passes a dict here rather than a ``Worker`` — the
    annotation looks wrong; confirm against arq's hook signature.
    """
    structlog.contextvars.bind_contextvars(job_id=ctx["job_id"])
    logging.info("Job Started")


async def on_job_end(ctx: Worker) -> None:
    """arq per-job hook: log completion, then clear the contextvars bound in
    ``on_job_start`` so the job ID does not leak into later jobs."""
    logging.info("Job Completed")  # fixed typo: was "Job Competed"
    structlog.contextvars.clear_contextvars()
29 changes: 28 additions & 1 deletion src/app/core/worker/settings.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,14 @@
import asyncio
from typing import cast

from arq.cli import watch_reload
from arq.connections import RedisSettings
from arq.typing import WorkerSettingsType
from arq.worker import check_health, run_worker

from ...core.config import settings
from .functions import sample_background_task, shutdown, startup
from ...core.logger import logging # noqa: F401
from .functions import on_job_end, on_job_start, sample_background_task, shutdown, startup

REDIS_QUEUE_HOST = settings.REDIS_QUEUE_HOST
REDIS_QUEUE_PORT = settings.REDIS_QUEUE_PORT
Expand All @@ -12,4 +19,24 @@ class WorkerSettings:
redis_settings = RedisSettings(host=REDIS_QUEUE_HOST, port=REDIS_QUEUE_PORT)
on_startup = startup
on_shutdown = shutdown
on_job_start = on_job_start
on_job_end = on_job_end
handle_signals = False


def start_arq_service(check: bool = False, burst: int | None = None, watch: str | None = None):
    """Run the arq worker, or its health check.

    Parameters
    ----------
    check:
        When True, run ``check_health`` and exit the process with its status.
    burst:
        Passed through to ``run_worker`` when given; ignored in watch mode.
    watch:
        Path to watch for changes; when set, run under ``watch_reload``.
    """
    worker_settings_ = cast("WorkerSettingsType", WorkerSettings)

    if check:
        # Raise SystemExit rather than calling the site-injected exit()
        # builtin, which is intended for the REPL and absent under `python -S`.
        raise SystemExit(check_health(worker_settings_))

    if watch:
        # NOTE: watch_reload drives the worker itself; burst does not apply.
        asyncio.run(watch_reload(watch, worker_settings_))
    else:
        kwargs = {} if burst is None else {"burst": burst}
        run_worker(worker_settings_, **kwargs)


# Script entry point, e.g.: python -m src.app.core.worker.settings
if __name__ == "__main__":
    start_arq_service()
38 changes: 38 additions & 0 deletions src/app/middleware/logger_middleware.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# app/middleware/logger_middleware.py
import uuid

import structlog
from fastapi import FastAPI, Request
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.responses import Response


class LoggerMiddleware(BaseHTTPMiddleware):
    """Bind per-request logging context into structlog's contextvars and echo
    the request ID back to the client.

    For each request this binds: ``request_id`` (taken from an incoming
    ``X-Request-ID`` header, or a fresh UUID4), ``client_host``, ``path``,
    ``method``, and — once the handler returns — ``status_code``. The request
    ID is also set on the ``X-Request-ID`` response header.

    The no-op ``__init__`` override has been removed: the inherited
    ``BaseHTTPMiddleware.__init__`` already accepts the app instance.
    """

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        """Populate the log context, invoke the downstream handler, then
        record the response status and return the request ID to the caller."""
        request_id = request.headers.get("X-Request-ID", str(uuid.uuid4()))
        # Clear first so context from a previous request on a reused task
        # cannot leak into this one.
        structlog.contextvars.clear_contextvars()
        structlog.contextvars.bind_contextvars(
            request_id=request_id,
            client_host=request.client.host if request.client else None,
            status_code=None,  # filled in once the handler has responded
            path=request.url.path,
            method=request.method,
        )
        response = await call_next(request)
        structlog.contextvars.bind_contextvars(status_code=response.status_code)
        response.headers["X-Request-ID"] = request_id
        return response
Loading