
198 add structlog #208


Open · wants to merge 7 commits into main
4 changes: 2 additions & 2 deletions app/api/health.py
@@ -6,9 +6,9 @@
from starlette.concurrency import run_in_threadpool

from app.services.smtp import SMTPEmailService
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()

router = APIRouter()

4 changes: 2 additions & 2 deletions app/api/ml.py
@@ -4,9 +4,9 @@
from fastapi.responses import StreamingResponse

from app.services.llm import get_llm_service
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()

router = APIRouter()

4 changes: 2 additions & 2 deletions app/api/stuff.py
@@ -5,9 +5,9 @@
from app.database import get_db
from app.models.stuff import Stuff
from app.schemas.stuff import StuffResponse, StuffSchema
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()

router = APIRouter(prefix="/v1/stuff")

4 changes: 2 additions & 2 deletions app/api/user.py
@@ -7,9 +7,9 @@
from app.models.user import User
from app.schemas.user import TokenResponse, UserLogin, UserResponse, UserSchema
from app.services.auth import create_access_token
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()

router = APIRouter(prefix="/v1/user")

4 changes: 2 additions & 2 deletions app/database.py
@@ -3,9 +3,9 @@
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from app.config import settings as global_settings
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()

engine = create_async_engine(
global_settings.asyncpg_url.unicode_string(),
107 changes: 47 additions & 60 deletions app/main.py
@@ -2,10 +2,6 @@
from pathlib import Path

import asyncpg

# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
@@ -17,75 +13,66 @@
from app.api.stuff import router as stuff_router
from app.api.user import router as user_router
from app.config import settings as global_settings

# from app.database import engine
from app.redis import get_redis
from app.services.auth import AuthBearer
from app.utils.logging import AppStructLogger

# from app.services.scheduler import SchedulerMiddleware
from app.utils.logging import AppLogger

logger = AppLogger().get_logger()

logger = AppStructLogger().get_logger()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")


@asynccontextmanager
async def lifespan(_app: FastAPI):
# Load the redis connection
_app.redis = await get_redis()

_postgres_dsn = global_settings.postgres_url.unicode_string()

async def lifespan(app: FastAPI):
app.redis = await get_redis()
postgres_dsn = global_settings.postgres_url.unicode_string()
try:
# TODO: cache with the redis connection
# Initialize the postgres connection pool
_app.postgres_pool = await asyncpg.create_pool(
dsn=_postgres_dsn,
app.postgres_pool = await asyncpg.create_pool(
dsn=postgres_dsn,
min_size=5,
max_size=20,
)
logger.info(f"Postgres pool created: {_app.postgres_pool.get_idle_size()=}")
logger.info("Postgres pool created", idle_size=app.postgres_pool.get_idle_size())
yield
finally:
# close redis connection and release the resources
await _app.redis.close()
# close postgres connection pool and release the resources
await _app.postgres_pool.close()


app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan)

app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])


app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router,
prefix="/v1/health",
tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())],
)


@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})


await app.redis.close()
await app.postgres_pool.close()

def create_app() -> FastAPI:
app = FastAPI(
title="Stuff And Nonsense API",
version="0.19.0",
lifespan=lifespan,
)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router,
prefix="/v1/health",
tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())],
)

@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})

return app

app = create_app()

# --- Unused/experimental code and TODOs ---
Copilot AI commented on Jun 29, 2025:

[nitpick] There are multiple blocks of commented-out experimental code and TODOs; consider removing or relocating them to keep the codebase clean and focused.

# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
# from app.database import engine
# from app.services.scheduler import SchedulerMiddleware
# _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
# _scheduler_event_broker = RedisEventBroker(
# client_or_url=global_settings.redis_url.unicode_string()
# )
# _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
# _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
#
# app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)


# TODO: every not GET meth should reset cache
# TODO: every scheduler task which needs to act on database should have access to connection pool via request - maybe ?
# TODO: every non-GET method should reset cache
# TODO: scheduler tasks needing DB should access connection pool via request
# TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru
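
For reference, a handler reaches the pool created in the lifespan above through the request's application object, which is what the connection-pool TODO alludes to (a sketch; the router, route path, and query are illustrative and not part of this PR):

from fastapi import APIRouter, Request

router = APIRouter()

@router.get("/pool-demo")  # hypothetical route, for illustration only
async def pool_demo(request: Request):
    # request.app is the FastAPI instance carrying the asyncpg pool set in lifespan()
    async with request.app.postgres_pool.acquire() as conn:
        value = await conn.fetchval("SELECT 1")
    return {"value": value}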
4 changes: 2 additions & 2 deletions app/models/base.py
@@ -6,9 +6,9 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, declared_attr

from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()


class Base(DeclarativeBase):
4 changes: 2 additions & 2 deletions app/services/auth.py
@@ -6,9 +6,9 @@

from app.config import settings as global_settings
from app.models.user import User
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()


async def get_from_redis(request: Request, key: str):
4 changes: 2 additions & 2 deletions app/services/smtp.py
@@ -7,10 +7,10 @@
from pydantic import EmailStr

from app.config import settings as global_settings
from app.utils.logging import AppLogger
from app.utils.logging import AppStructLogger
from app.utils.singleton import SingletonMetaNoArgs

logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()


@define
69 changes: 55 additions & 14 deletions app/utils/logging.py
@@ -1,24 +1,65 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path

from rich.console import Console
from rich.logging import RichHandler
import orjson
import structlog
from attrs import define, field
from whenever._whenever import Instant
Copilot AI commented on Jun 29, 2025:

[nitpick] Importing from a private module (_whenever) can break if the library internals change; consider using the public API entry point (e.g., from whenever import Instant) for stability.

Suggested change:
-from whenever._whenever import Instant
+from whenever import Instant


from app.utils.singleton import SingletonMeta
from app.utils.singleton import SingletonMetaNoArgs


class AppLogger(metaclass=SingletonMeta):
_logger = None
# TODO: merge this wrapper with the one in structlog under one hood of AppLogger
class BytesToTextIOWrapper:
def __init__(self, handler, encoding="utf-8"):
self.handler = handler
self.encoding = encoding

def __init__(self):
self._logger = logging.getLogger(__name__)
def write(self, b):
if isinstance(b, bytes):
self.handler.stream.write(b.decode(self.encoding))
else:
self.handler.stream.write(b)
self.handler.flush()

def flush(self):
self.handler.flush()

def close(self):
self.handler.close()

def get_logger(self):
return self._logger

@define(slots=True)
class AppStructLogger(metaclass=SingletonMetaNoArgs):
_logger: structlog.BoundLogger = field(init=False)

class RichConsoleHandler(RichHandler):
def __init__(self, width=200, style=None, **kwargs):
super().__init__(
console=Console(color_system="256", width=width, style=style, stderr=True),
**kwargs,
def __attrs_post_init__(self):
_log_date = Instant.now().py_datetime().strftime("%Y%m%d")
_log_path = Path(f"{_log_date}_{os.getpid()}.log")
Copilot AI commented on Jun 29, 2025:

[nitpick] Logs are written to the current working directory; consider writing to a dedicated logs/ directory or ensuring the path exists to avoid clutter.
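
A minimal sketch of that suggestion (the logs/ location is an assumption, not part of this PR):

_log_dir = Path("logs")
_log_dir.mkdir(parents=True, exist_ok=True)  # ensure the directory exists before the handler opens the file
_log_path = _log_dir / f"{_log_date}_{os.getpid()}.log"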

_handler = RotatingFileHandler(
filename=_log_path,
mode="a",
maxBytes=10 * 1024 * 1024,
backupCount=5,
encoding="utf-8"
)
structlog.configure(
cache_logger_on_first_use=True,
wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
processors=[
structlog.contextvars.merge_contextvars,
structlog.processors.add_log_level,
structlog.processors.format_exc_info,
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.JSONRenderer(serializer=orjson.dumps),
],
logger_factory=structlog.BytesLoggerFactory(
file=BytesToTextIOWrapper(_handler)
)
)
self._logger = structlog.get_logger()

def get_logger(self) -> structlog.BoundLogger:
return self._logger
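
For context, a usage sketch of the new logger (the event name and fields are illustrative, not part of this PR):

from app.utils.logging import AppStructLogger

logger = AppStructLogger().get_logger()
logger.info("order_created", order_id=42, source="api")

With JSONRenderer(serializer=orjson.dumps) and the processors configured above, each call is written to the rotating log file as a single JSON document containing the event name, log level, ISO timestamp, and any bound key-value pairs.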
1 change: 0 additions & 1 deletion compose.yml
@@ -10,7 +10,6 @@ services:
- .secrets
command: bash -c "
uvicorn app.main:app
--log-config ./logging-uvicorn.json
--host 0.0.0.0 --port 8080
--lifespan=on --use-colors --loop uvloop --http httptools
--reload --log-level debug
1 change: 0 additions & 1 deletion granian-compose.yml
@@ -12,7 +12,6 @@ services:
granian --interface asgi
--host 0.0.0.0 --port 8080
app.main:app --access-log --log-level debug
--log-config ./logging-granian.json
"
volumes:
- ./app:/panettone/app
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -29,6 +29,8 @@ dependencies = [
"polyfactory>=2.21.0",
"granian>=2.3.2",
"apscheduler[redis,sqlalchemy]>=4.0.0a6",
"structlog>=25.4.0",
"whenever>=0.8.5",
]

[tool.uv]
Expand Down
Loading