From 1b60729905062e951fc7c0130a59e555959b820b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?sam=E7=9A=84=E7=94=B5=E8=84=91?= Date: Sun, 15 Mar 2026 08:03:23 +0800 Subject: [PATCH 01/15] feat: add fullstack review and validation tooling --- .dockerignore | 10 + .env.example | 4 +- .gitignore | 7 +- Dockerfile.fullstack | 29 ++ README.md | 11 + app/Dockerfile | 1 + app/docker-entrypoint.d/40-finmind-env.sh | 6 + app/index.html | 1 + app/nginx.conf | 10 + app/public/env.js | 1 + app/src/__tests__/apiClient.test.ts | 7 +- app/src/api/client.ts | 19 +- app/src/vite-env.d.ts | 2 + app/vite.config.ts | 54 ++- docker-compose.prod.yml | 177 ++++++++ packages/backend/Dockerfile | 4 +- packages/backend/app/__init__.py | 92 +++- packages/backend/app/config.py | 16 + packages/backend/app/observability.py | 8 + packages/backend/app/request_utils.py | 12 + packages/backend/app/routes/auth.py | 23 +- packages/backend/app/routes/bills.py | 81 +++- packages/backend/app/routes/categories.py | 15 +- packages/backend/app/routes/expenses.py | 104 ++++- packages/backend/app/routes/insights.py | 11 + packages/backend/app/routes/reminders.py | 34 +- .../backend/app/services/expense_import.py | 15 +- packages/backend/docker-entrypoint.sh | 16 + packages/backend/tests/test_auth.py | 13 + packages/backend/tests/test_bills.py | 49 +++ packages/backend/tests/test_categories.py | 11 + packages/backend/tests/test_config.py | 8 + packages/backend/tests/test_expenses.py | 103 +++++ packages/backend/tests/test_health.py | 86 ++++ packages/backend/tests/test_insights.py | 9 + packages/backend/tests/test_reminders.py | 24 ++ scripts/build-demo-video.sh | 127 ++++++ scripts/deploy-prod.ps1 | 14 + scripts/deploy-prod.sh | 13 + scripts/record-browser-demo.mjs | 28 ++ scripts/record-demo-video.sh | 89 ++++ scripts/review-deploy.sh | 84 ++++ scripts/smoke-deploy.py | 218 ++++++++++ scripts/validate-deploy.sh | 9 + scripts/validate-observability.sh | 58 +++ scripts/validate-public-deployment.sh | 57 +++ 
scripts/validate-ui.mjs | 392 ++++++++++++++++++ 47 files changed, 2074 insertions(+), 88 deletions(-) create mode 100644 .dockerignore create mode 100644 Dockerfile.fullstack create mode 100755 app/docker-entrypoint.d/40-finmind-env.sh create mode 100644 app/public/env.js create mode 100644 docker-compose.prod.yml create mode 100644 packages/backend/app/request_utils.py create mode 100755 packages/backend/docker-entrypoint.sh create mode 100644 packages/backend/tests/test_config.py create mode 100644 packages/backend/tests/test_health.py create mode 100755 scripts/build-demo-video.sh create mode 100644 scripts/deploy-prod.ps1 create mode 100755 scripts/deploy-prod.sh create mode 100755 scripts/record-browser-demo.mjs create mode 100755 scripts/record-demo-video.sh create mode 100755 scripts/review-deploy.sh create mode 100755 scripts/smoke-deploy.py create mode 100755 scripts/validate-deploy.sh create mode 100755 scripts/validate-observability.sh create mode 100755 scripts/validate-public-deployment.sh create mode 100644 scripts/validate-ui.mjs diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..4080e764 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.git +.github +app/dist +app/node_modules +packages/backend/.pytest_cache +packages/backend/__pycache__ +packages/backend/app/__pycache__ +**/__pycache__ +**/*.pyc +tmp diff --git a/.env.example b/.env.example index bfea48dd..cd5d93d3 100644 --- a/.env.example +++ b/.env.example @@ -7,11 +7,13 @@ JWT_SECRET="change-me" OPENAI_API_KEY="" GEMINI_API_KEY="" GEMINI_MODEL="gemini-1.5-flash" +CORS_ALLOWED_ORIGINS="http://localhost:5173,http://127.0.0.1:5173,http://localhost:8081,http://127.0.0.1:8081,http://frontend,http://frontend:80" TWILIO_ACCOUNT_SID="" TWILIO_AUTH_TOKEN="" TWILIO_WHATSAPP_FROM="" EMAIL_FROM="" SMTP_URL="" +MAX_UPLOAD_BYTES="10485760" -VITE_API_URL="http://localhost:8000" +VITE_API_URL="" LOG_LEVEL="INFO" diff --git a/.gitignore b/.gitignore index cf59987d..46f2ba1c 
100644 --- a/.gitignore +++ b/.gitignore @@ -33,15 +33,14 @@ logs/ npm-debug.log* yarn-debug.log* pnpm-debug.log* +output/ +tmp/ plan.md checklist.md continuation_prompt.md deployment.md SESSION_SUMMARY.md - -docker-compose.prod.yml - FEATURES.md create_issues.ps1 -bounty_issues.ps1 \ No newline at end of file +bounty_issues.ps1 diff --git a/Dockerfile.fullstack b/Dockerfile.fullstack new file mode 100644 index 00000000..dc1dfbe1 --- /dev/null +++ b/Dockerfile.fullstack @@ -0,0 +1,29 @@ +FROM node:20-alpine AS frontend-builder +WORKDIR /frontend +COPY app/package*.json ./ +RUN npm ci || npm install +COPY app ./ +RUN npm run build + +FROM python:3.11-slim AS backend +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 \ + FINMIND_SERVE_SPA=1 + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +COPY packages/backend/requirements.txt /app/requirements.txt +RUN pip install -r /app/requirements.txt + +COPY packages/backend/app /app/app +COPY packages/backend/wsgi.py /app/wsgi.py +COPY --from=frontend-builder /frontend/dist /app/app/static + +EXPOSE 8000 +CMD ["gunicorn", "-w", "2", "-k", "gthread", "-b", "0.0.0.0:8000", "wsgi:app"] diff --git a/README.md b/README.md index 49592bff..0dded978 100644 --- a/README.md +++ b/README.md @@ -144,6 +144,10 @@ finmind/ - Frontend: Vercel. - Secrets: use environment variables (.env locally, platform secrets in cloud). - Kubernetes manifests for full stack deployment are available in `deploy/k8s/`. +- Production deployment artifacts are documented in `DEPLOY.md`. 
+- Single-command production Compose entrypoints: + - POSIX: `./scripts/deploy-prod.sh` + - PowerShell: `./scripts/deploy-prod.ps1` ## Local Development 1) Prereqs: Docker, Docker Compose, Node 20+, Python 3.11+ @@ -156,6 +160,13 @@ finmind/ - Loki: http://localhost:3100 - Nginx proxy: http://localhost:8080 (status at `/nginx_status`) +### Production-style local verification +- Copy env: `cp .env.example .env` +- Start prod stack: `./scripts/deploy-prod.sh` +- Smoke-check runtime acceptance: `./scripts/validate-deploy.sh` +- Full maintainer review path: `./scripts/review-deploy.sh` +- CI review artifact bundle: `finmind-review-artifacts` + ### Backend Test Runner (No local pytest setup required) - PowerShell (Windows): - `./scripts/test-backend.ps1` diff --git a/app/Dockerfile b/app/Dockerfile index 2d1a3ad0..da178cd0 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -9,6 +9,7 @@ RUN npm run build FROM nginx:alpine # Copy custom nginx config for SPA fallback COPY nginx.conf /etc/nginx/conf.d/default.conf +COPY docker-entrypoint.d/40-finmind-env.sh /docker-entrypoint.d/40-finmind-env.sh COPY --from=builder /app/dist /usr/share/nginx/html EXPOSE 80 CMD ["nginx", "-g", "daemon off;"] diff --git a/app/docker-entrypoint.d/40-finmind-env.sh b/app/docker-entrypoint.d/40-finmind-env.sh new file mode 100755 index 00000000..61de2487 --- /dev/null +++ b/app/docker-entrypoint.d/40-finmind-env.sh @@ -0,0 +1,6 @@ +#!/bin/sh +set -eu + +cat >/usr/share/nginx/html/env.js <
+ diff --git a/app/nginx.conf b/app/nginx.conf index bb4a38b9..5e39599b 100644 --- a/app/nginx.conf +++ b/app/nginx.conf @@ -9,6 +9,16 @@ server { try_files $uri /index.html; } + location = /nginx_status { + stub_status; + allow 127.0.0.1; + allow 10.0.0.0/8; + allow 172.16.0.0/12; + allow 192.168.0.0/16; + deny all; + access_log off; + } + # Optional: cache static assets location ~* \.(?:css|js|jpg|jpeg|gif|png|svg|ico|woff2?)$ { expires 7d; diff --git a/app/public/env.js b/app/public/env.js new file mode 100644 index 00000000..bd2ecd88 --- /dev/null +++ b/app/public/env.js @@ -0,0 +1 @@ +window.__FINMIND_API_URL__ = window.__FINMIND_API_URL__ || ""; diff --git a/app/src/__tests__/apiClient.test.ts b/app/src/__tests__/apiClient.test.ts index 89cc60bc..b3aec021 100644 --- a/app/src/__tests__/apiClient.test.ts +++ b/app/src/__tests__/apiClient.test.ts @@ -1,4 +1,4 @@ -import { api } from '@/api/client'; +import { api, resolveApiBaseUrl } from '@/api/client'; import * as auth from '@/api/auth'; // Use real localStorage via JSDOM @@ -85,4 +85,9 @@ describe('api client', () => { 'Server error. 
Please try again in a minute.', ); }); + + it('falls back to same-origin when no explicit API URL is configured', () => { + delete (globalThis as { __FINMIND_API_URL__?: string }).__FINMIND_API_URL__; + expect(resolveApiBaseUrl()).toBe(''); + }); }); diff --git a/app/src/api/client.ts b/app/src/api/client.ts index 18ca4284..191e58db 100644 --- a/app/src/api/client.ts +++ b/app/src/api/client.ts @@ -7,7 +7,7 @@ import { } from '../lib/auth'; import { refresh as refreshApi } from './auth'; -function resolveApiBaseUrl(): string { +export function resolveApiBaseUrl(): string { const fromRuntime = (globalThis as { __FINMIND_API_URL__?: string }).__FINMIND_API_URL__; if (fromRuntime) return fromRuntime.replace(/\/$/, ''); @@ -15,15 +15,15 @@ function resolveApiBaseUrl(): string { .process?.env?.VITE_API_URL; if (fromProcess) return fromProcess.replace(/\/$/, ''); - try { - const metaEnv = Function( - 'return (typeof import !== "undefined" && import.meta && import.meta.env) ? import.meta.env : {};', - )() as Record; - if (metaEnv?.VITE_API_URL) return metaEnv.VITE_API_URL.replace(/\/$/, ''); - } catch { - // ignored for non-vite runtime (tests). + if (typeof __FINMIND_VITE_API_URL__ !== 'undefined' && __FINMIND_VITE_API_URL__) { + return __FINMIND_VITE_API_URL__.replace(/\/$/, ''); } - return 'http://localhost:8000'; + + if (typeof window !== 'undefined' && window.location?.origin) { + return ''; + } + + return 'http://127.0.0.1:8000'; } export const baseURL = resolveApiBaseUrl(); @@ -45,7 +45,6 @@ export async function api( method: opts.method || 'GET', headers, body: opts.body ? 
JSON.stringify(opts.body) : undefined, - credentials: 'include', }); } diff --git a/app/src/vite-env.d.ts b/app/src/vite-env.d.ts index 11f02fe2..03644df2 100644 --- a/app/src/vite-env.d.ts +++ b/app/src/vite-env.d.ts @@ -1 +1,3 @@ /// + +declare const __FINMIND_VITE_API_URL__: string; diff --git a/app/vite.config.ts b/app/vite.config.ts index 329b7953..12301651 100644 --- a/app/vite.config.ts +++ b/app/vite.config.ts @@ -1,19 +1,45 @@ -import { defineConfig } from "vite"; +import { defineConfig, loadEnv } from "vite"; import react from "@vitejs/plugin-react-swc"; import path from "path"; // https://vitejs.dev/config/ -export default defineConfig(({ mode }) => ({ - server: { - host: "::", - port: 5173, - }, - plugins: [ - react(), - ].filter(Boolean), - resolve: { - alias: { - "@": path.resolve(__dirname, "./src"), +export default defineConfig(({ mode }) => { + const env = loadEnv(mode, process.cwd(), ""); + const proxyTarget = env.VITE_PROXY_API_TARGET || "http://127.0.0.1:8000"; + const proxyPaths = [ + "/auth", + "/categories", + "/expenses", + "/bills", + "/reminders", + "/dashboard", + "/insights", + "/health", + "/metrics", + ]; + + return { + define: { + __FINMIND_VITE_API_URL__: JSON.stringify(env.VITE_API_URL || ""), + }, + server: { + host: "::", + port: 5173, + proxy: Object.fromEntries( + proxyPaths.map((prefix) => [ + prefix, + { + target: proxyTarget, + changeOrigin: true, + }, + ]), + ), + }, + plugins: [react()].filter(Boolean), + resolve: { + alias: { + "@": path.resolve(__dirname, "./src"), + }, }, - }, -})); + }; +}); diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml new file mode 100644 index 00000000..789bd25d --- /dev/null +++ b/docker-compose.prod.yml @@ -0,0 +1,177 @@ +services: + postgres: + image: postgres:16 + environment: + POSTGRES_USER: ${POSTGRES_USER:-finmind} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-finmind} + POSTGRES_DB: ${POSTGRES_DB:-finmind} + ports: + - "5432:5432" + volumes: + - pgdata:/var/lib/postgresql/data + 
restart: unless-stopped + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-finmind} -d ${POSTGRES_DB:-finmind}"] + interval: 10s + timeout: 5s + retries: 10 + + redis: + image: redis:7 + ports: + - "6379:6379" + restart: unless-stopped + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 10 + + migrate: + build: + context: ./packages/backend + env_file: + - .env + depends_on: + postgres: + condition: service_healthy + environment: + DATABASE_URL: ${DATABASE_URL:-postgresql+psycopg2://finmind:finmind@postgres:5432/finmind} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + JWT_SECRET: ${JWT_SECRET:-change-me} + GEMINI_API_KEY: ${GEMINI_API_KEY:-} + GEMINI_MODEL: ${GEMINI_MODEL:-gemini-1.5-flash} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + command: ["python", "-m", "flask", "--app", "wsgi:app", "init-db"] + restart: "no" + + backend: + build: + context: ./packages/backend + env_file: + - .env + depends_on: + postgres: + condition: service_healthy + migrate: + condition: service_completed_successfully + redis: + condition: service_healthy + environment: + DATABASE_URL: ${DATABASE_URL:-postgresql+psycopg2://finmind:finmind@postgres:5432/finmind} + REDIS_URL: ${REDIS_URL:-redis://redis:6379/0} + JWT_SECRET: ${JWT_SECRET:-change-me} + GEMINI_API_KEY: ${GEMINI_API_KEY:-} + GEMINI_MODEL: ${GEMINI_MODEL:-gemini-1.5-flash} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + ports: + - "8000:8000" + command: >- + sh -c "export PROMETHEUS_MULTIPROC_DIR=/tmp/prometheus_multiproc && + rm -rf $$PROMETHEUS_MULTIPROC_DIR && + mkdir -p $$PROMETHEUS_MULTIPROC_DIR && + gunicorn --workers=2 --threads=4 --bind 0.0.0.0:8000 wsgi:app" + restart: unless-stopped + healthcheck: + test: + [ + "CMD", + "python", + "-c", + "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8000/health/ready').read()", + ] + interval: 15s + timeout: 5s + retries: 10 + + frontend: + build: + context: ./app + depends_on: + backend: + condition: service_healthy + 
environment: + FINMIND_API_URL: ${FINMIND_API_URL:-http://localhost:8000} + ports: + - "8081:80" + restart: unless-stopped + healthcheck: + test: ["CMD-SHELL", "wget -qO- http://127.0.0.1/ >/dev/null"] + interval: 15s + timeout: 5s + retries: 10 + + prometheus: + image: prom/prometheus:v2.54.1 + profiles: ["observability"] + command: + - --config.file=/etc/prometheus/prometheus.yml + - --storage.tsdb.retention.time=14d + - --storage.tsdb.retention.size=1GB + volumes: + - ./deploy/observability/prometheus.yml:/etc/prometheus/prometheus.yml:ro + - prometheus-data:/prometheus + ports: + - "9090:9090" + depends_on: + backend: + condition: service_healthy + postgres-exporter: + condition: service_started + redis-exporter: + condition: service_started + nginx-exporter: + condition: service_started + restart: unless-stopped + + grafana: + image: grafana/grafana-oss:11.1.5 + profiles: ["observability"] + depends_on: + prometheus: + condition: service_started + environment: + GF_SECURITY_ADMIN_USER: ${GRAFANA_ADMIN_USER:-finmind_admin} + GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_ADMIN_PASSWORD:-change-this-admin-password} + GF_USERS_ALLOW_SIGN_UP: "false" + volumes: + - ./deploy/observability/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro + - grafana-data:/var/lib/grafana + ports: + - "3000:3000" + restart: unless-stopped + + postgres-exporter: + image: quay.io/prometheuscommunity/postgres-exporter:v0.16.0 + profiles: ["observability"] + environment: + DATA_SOURCE_NAME: postgresql://${POSTGRES_USER:-finmind}:${POSTGRES_PASSWORD:-finmind}@postgres:5432/${POSTGRES_DB:-finmind}?sslmode=disable + depends_on: + postgres: + condition: service_healthy + restart: unless-stopped + + redis-exporter: + image: oliver006/redis_exporter:v1.63.0 + profiles: ["observability"] + environment: + REDIS_ADDR: redis://redis:6379 + depends_on: + redis: + condition: service_healthy + restart: unless-stopped + + nginx-exporter: + image: nginx/nginx-prometheus-exporter:1.3.0 + 
profiles: ["observability"] + command: + - -nginx.scrape-uri=http://frontend/nginx_status + depends_on: + frontend: + condition: service_healthy + restart: unless-stopped + +volumes: + pgdata: + prometheus-data: + grafana-data: diff --git a/packages/backend/Dockerfile b/packages/backend/Dockerfile index 68c4d4f6..672e6485 100644 --- a/packages/backend/Dockerfile +++ b/packages/backend/Dockerfile @@ -19,6 +19,8 @@ RUN pip install -r requirements.txt # Copy backend source COPY app /app/app COPY wsgi.py /app/wsgi.py +COPY docker-entrypoint.sh /app/docker-entrypoint.sh +RUN chmod +x /app/docker-entrypoint.sh EXPOSE 8000 -CMD ["gunicorn", "-w", "2", "-k", "gthread", "-b", "0.0.0.0:8000", "wsgi:app"] +CMD ["./docker-entrypoint.sh"] diff --git a/packages/backend/app/__init__.py b/packages/backend/app/__init__.py index cdf76b45..e3e5910b 100644 --- a/packages/backend/app/__init__.py +++ b/packages/backend/app/__init__.py @@ -1,6 +1,8 @@ -from flask import Flask, jsonify +from flask import Flask, abort, jsonify, send_from_directory +from werkzeug.exceptions import RequestEntityTooLarge +from sqlalchemy import text from .config import Settings -from .extensions import db, jwt +from .extensions import db, jwt, redis_client from .routes import register_routes from .observability import ( Observability, @@ -13,6 +15,20 @@ import os import logging from datetime import timedelta +from pathlib import Path + + +SPA_EXCLUDED_PREFIXES = ( + "/auth", + "/categories", + "/expenses", + "/bills", + "/reminders", + "/dashboard", + "/insights", + "/health", + "/metrics", +) def create_app(settings: Settings | None = None) -> Flask: @@ -23,6 +39,7 @@ def create_app(settings: Settings | None = None) -> Flask: app.config.update( SQLALCHEMY_DATABASE_URI=cfg.database_url, SQLALCHEMY_TRACK_MODIFICATIONS=False, + MAX_CONTENT_LENGTH=cfg.max_upload_bytes, JWT_SECRET_KEY=cfg.jwt_secret, JWT_ACCESS_TOKEN_EXPIRES=timedelta(minutes=cfg.jwt_access_minutes), 
JWT_REFRESH_TOKEN_EXPIRES=timedelta(hours=cfg.jwt_refresh_hours), @@ -45,13 +62,17 @@ def create_app(settings: Settings | None = None) -> Flask: db.init_app(app) jwt.init_app(app) app.extensions["observability"] = Observability() - # CORS for local dev frontend - CORS(app, resources={r"*": {"origins": "*"}}, supports_credentials=True) + # Keep local/front-end origins explicit so bearer-token requests work + # without opening wildcard credentialed CORS. + CORS(app, resources={r"*": {"origins": cfg.cors_origins}}) # Redis (already global) # Blueprint routes register_routes(app) + if _should_serve_spa(app): + _register_spa_routes(app) + # Backward-compatible schema patch for existing databases. with app.app_context(): _ensure_schema_compatibility(app) @@ -68,6 +89,29 @@ def _after_request(response): def health(): return jsonify(status="ok"), 200 + @app.get("/health/ready") + def health_ready(): + checks = {"database": "error", "redis": "error"} + status_code = 200 + + try: + db.session.execute(text("SELECT 1")) + checks["database"] = "connected" + except Exception: + app.logger.exception("Database readiness check failed") + db.session.rollback() + status_code = 503 + + try: + redis_client.ping() + checks["redis"] = "connected" + except Exception: + app.logger.exception("Redis readiness check failed") + status_code = 503 + + status = "ok" if status_code == 200 else "error" + return jsonify(status=status, checks=checks), status_code + @app.get("/metrics") def metrics(): obs = app.extensions["observability"] @@ -77,6 +121,10 @@ def metrics(): def internal_error(_error): return jsonify(error="internal server error"), 500 + @app.errorhandler(RequestEntityTooLarge) + def request_too_large(_error): + return jsonify(error="upload too large"), 413 + @app.cli.command("init-db") def init_db(): """Initialize database schema from db/schema.sql""" @@ -96,11 +144,45 @@ def init_db(): return app +def _should_serve_spa(app: Flask) -> bool: + if os.getenv("FINMIND_SERVE_SPA", "0") != 
"1": + return False + if not app.static_folder: + return False + return Path(app.static_folder, "index.html").exists() + + +def _register_spa_routes(app: Flask) -> None: + @app.get("/", defaults={"path": ""}) + @app.get("/") + def serve_spa(path: str): + requested_path = f"/{path}" if path else "/" + if any( + requested_path == prefix or requested_path.startswith(f"{prefix}/") + for prefix in SPA_EXCLUDED_PREFIXES + ): + abort(404) + + static_root = Path(app.static_folder or "") + asset_path = static_root / path + if path and asset_path.is_file(): + return send_from_directory(str(static_root), path) + + return send_from_directory(str(static_root), "index.html") + + def _ensure_schema_compatibility(app: Flask) -> None: """Apply minimal compatibility ALTERs for existing deployments.""" if db.engine.dialect.name != "postgresql": return - conn = db.engine.raw_connection() + try: + conn = db.engine.raw_connection() + except Exception: + app.logger.warning( + "Skipping schema compatibility patch until database is reachable", + exc_info=True, + ) + return try: cur = conn.cursor() cur.execute( diff --git a/packages/backend/app/config.py b/packages/backend/app/config.py index cf789755..b499e115 100644 --- a/packages/backend/app/config.py +++ b/packages/backend/app/config.py @@ -22,6 +22,17 @@ class Settings(BaseSettings): email_from: str | None = None smtp_url: str | None = None # e.g. 
smtp+ssl://user:pass@mail:465 + max_upload_bytes: int = 10 * 1024 * 1024 + cors_allowed_origins: str = Field( + default=( + "http://localhost:5173," + "http://127.0.0.1:5173," + "http://localhost:8081," + "http://127.0.0.1:8081," + "http://frontend," + "http://frontend:80" + ) + ) # pydantic-settings v2 configuration model_config = SettingsConfigDict( @@ -29,3 +40,8 @@ class Settings(BaseSettings): env_file_encoding="utf-8", extra="ignore", ) + + @property + def cors_origins(self) -> list[str]: + origins = [origin.strip() for origin in self.cors_allowed_origins.split(",")] + return [origin for origin in origins if origin] diff --git a/packages/backend/app/observability.py b/packages/backend/app/observability.py index 0c2a6bf7..eb49dbbd 100644 --- a/packages/backend/app/observability.py +++ b/packages/backend/app/observability.py @@ -118,6 +118,14 @@ def finalize_request(response: Response) -> Response: if request_id: response.headers["X-Request-ID"] = request_id + response.headers.setdefault("X-Content-Type-Options", "nosniff") + response.headers.setdefault("X-Frame-Options", "DENY") + response.headers.setdefault("Referrer-Policy", "strict-origin-when-cross-origin") + + if request.path.startswith("/auth/"): + response.headers["Cache-Control"] = "no-store" + response.headers["Pragma"] = "no-cache" + request_start = getattr(g, "request_start", None) if request_start is not None: elapsed = time.perf_counter() - request_start diff --git a/packages/backend/app/request_utils.py b/packages/backend/app/request_utils.py new file mode 100644 index 00000000..37037b75 --- /dev/null +++ b/packages/backend/app/request_utils.py @@ -0,0 +1,12 @@ +from typing import Any + +from flask import request + + +def get_json_object() -> dict[str, Any] | None: + data = request.get_json(silent=True) + if data is None: + return {} + if isinstance(data, dict): + return data + return None diff --git a/packages/backend/app/routes/auth.py b/packages/backend/app/routes/auth.py index 
05a39377..c2a31c61 100644 --- a/packages/backend/app/routes/auth.py +++ b/packages/backend/app/routes/auth.py @@ -1,4 +1,4 @@ -from flask import Blueprint, request, jsonify +from flask import Blueprint, jsonify from werkzeug.security import generate_password_hash, check_password_hash from flask_jwt_extended import ( create_access_token, @@ -10,6 +10,7 @@ ) from ..extensions import db, redis_client from ..models import User +from ..request_utils import get_json_object import logging import time @@ -30,9 +31,11 @@ @bp.post("/register") def register(): - data = request.get_json() or {} - email = data.get("email") - password = data.get("password") + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + email = str(data.get("email") or "").strip() + password = str(data.get("password") or "") if not email or not password: logger.warning("Register missing email/password") return jsonify(error="email and password required"), 400 @@ -52,9 +55,11 @@ def register(): @bp.post("/login") def login(): - data = request.get_json() or {} - email = data.get("email") - password = data.get("password") + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + email = str(data.get("email") or "").strip() + password = str(data.get("password") or "") user = db.session.query(User).filter_by(email=email).first() if not user or not check_password_hash(user.password_hash, password): logger.warning("Login failed for email=%s", email) @@ -87,7 +92,9 @@ def update_me(): user = db.session.get(User, uid) if not user: return jsonify(error="not found"), 404 - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 if "preferred_currency" in data: cur = str(data.get("preferred_currency") or "").upper().strip() if cur not in SUPPORTED_CURRENCIES: diff --git a/packages/backend/app/routes/bills.py 
b/packages/backend/app/routes/bills.py index f557e90d..099b48dc 100644 --- a/packages/backend/app/routes/bills.py +++ b/packages/backend/app/routes/bills.py @@ -1,8 +1,10 @@ from datetime import date, timedelta -from flask import Blueprint, jsonify, request +from decimal import Decimal, InvalidOperation +from flask import Blueprint, jsonify from flask_jwt_extended import jwt_required, get_jwt_identity from ..extensions import db from ..models import Bill, BillCadence, User +from ..request_utils import get_json_object from ..services.cache import cache_delete_patterns import logging @@ -44,17 +46,40 @@ def list_bills(): def create_bill(): uid = int(get_jwt_identity()) user = db.session.get(User, uid) - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + name = str(data.get("name") or "").strip() + if not name: + return jsonify(error="name required"), 400 + amount = _parse_amount(data.get("amount")) + if amount is None: + return jsonify(error="invalid amount"), 400 + due_date = _parse_due_date(data.get("next_due_date")) + if due_date is None: + return jsonify(error="invalid next_due_date"), 400 + cadence = _parse_cadence(data.get("cadence", "MONTHLY")) + if cadence is None: + return jsonify(error="invalid cadence"), 400 + autopay_enabled = _parse_bool_flag(data.get("autopay_enabled"), default=False) + if autopay_enabled is None: + return jsonify(error="invalid autopay_enabled"), 400 + channel_whatsapp = _parse_bool_flag(data.get("channel_whatsapp"), default=False) + if channel_whatsapp is None: + return jsonify(error="invalid channel_whatsapp"), 400 + channel_email = _parse_bool_flag(data.get("channel_email"), default=True) + if channel_email is None: + return jsonify(error="invalid channel_email"), 400 b = Bill( user_id=uid, - name=data["name"], - amount=data["amount"], + name=name, + amount=amount, currency=data.get("currency") or (user.preferred_currency if user else "INR"), - 
next_due_date=date.fromisoformat(data["next_due_date"]), - cadence=BillCadence(data.get("cadence", "MONTHLY")), - autopay_enabled=bool(data.get("autopay_enabled", False)), - channel_whatsapp=bool(data.get("channel_whatsapp", False)), - channel_email=bool(data.get("channel_email", True)), + next_due_date=due_date, + cadence=BillCadence(cadence), + autopay_enabled=autopay_enabled, + channel_whatsapp=channel_whatsapp, + channel_email=channel_email, ) db.session.add(b) db.session.commit() @@ -89,3 +114,41 @@ def mark_paid(bill_id: int): "Marked bill paid id=%s user=%s next_due_date=%s", b.id, uid, b.next_due_date ) return jsonify(message="updated") + + +def _parse_amount(raw) -> Decimal | None: + try: + return Decimal(str(raw)).quantize(Decimal("0.01")) + except (InvalidOperation, ValueError, TypeError): + return None + + +def _parse_due_date(raw) -> date | None: + if not raw: + return None + try: + return date.fromisoformat(str(raw)) + except ValueError: + return None + + +def _parse_cadence(raw: str | None) -> str | None: + cadence = str(raw or "").upper().strip() + if cadence in {item.value for item in BillCadence}: + return cadence + return None + + +def _parse_bool_flag(raw, *, default: bool) -> bool | None: + if raw is None: + return default + if isinstance(raw, bool): + return raw + if isinstance(raw, int) and raw in {0, 1}: + return bool(raw) + value = str(raw).strip().lower() + if value in {"true", "1", "yes", "on"}: + return True + if value in {"false", "0", "no", "off"}: + return False + return None diff --git a/packages/backend/app/routes/categories.py b/packages/backend/app/routes/categories.py index 71269a13..595e266c 100644 --- a/packages/backend/app/routes/categories.py +++ b/packages/backend/app/routes/categories.py @@ -1,8 +1,9 @@ import logging -from flask import Blueprint, jsonify, request +from flask import Blueprint, jsonify from flask_jwt_extended import jwt_required, get_jwt_identity from ..extensions import db from ..models import Category 
+from ..request_utils import get_json_object bp = Blueprint("categories", __name__) logger = logging.getLogger("finmind.categories") @@ -23,8 +24,10 @@ def list_categories(): @jwt_required() def create_category(): uid = int(get_jwt_identity()) - data = request.get_json() or {} - name = (data.get("name") or "").strip() + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + name = str(data.get("name") or "").strip() if not name: logger.warning("Create category missing name user=%s", uid) return jsonify(error="name required"), 400 @@ -46,8 +49,10 @@ def update_category(category_id: int): c = db.session.get(Category, category_id) if not c or c.user_id != uid: return jsonify(error="not found"), 404 - data = request.get_json() or {} - name = (data.get("name") or "").strip() + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + name = str(data.get("name") or "").strip() if not name: return jsonify(error="name required"), 400 c.name = name diff --git a/packages/backend/app/routes/expenses.py b/packages/backend/app/routes/expenses.py index 1376d46f..aa35208a 100644 --- a/packages/backend/app/routes/expenses.py +++ b/packages/backend/app/routes/expenses.py @@ -6,6 +6,7 @@ from flask_jwt_extended import jwt_required, get_jwt_identity from ..extensions import db from ..models import Expense, RecurringCadence, RecurringExpense, User +from ..request_utils import get_json_object from ..services.cache import cache_delete_patterns, monthly_summary_key from ..services import expense_import import logging @@ -57,12 +58,24 @@ def list_expenses(): def create_expense(): uid = int(get_jwt_identity()) user = db.session.get(User, uid) - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 amount = _parse_amount(data.get("amount")) if amount is None: return jsonify(error="invalid amount"), 400 + 
category_id = _parse_category_id(data.get("category_id")) + if ( + "category_id" in data + and data.get("category_id") not in (None, "", "null") + and category_id is None + ): + return jsonify(error="invalid category_id"), 400 raw_date = data.get("date") or data.get("spent_at") - description = (data.get("description") or data.get("notes") or "").strip() + spent_at = _parse_date(raw_date) + if raw_date and spent_at is None: + return jsonify(error="invalid date"), 400 + description = str(data.get("description") or data.get("notes") or "").strip() if not description: return jsonify(error="description required"), 400 e = Expense( @@ -70,9 +83,9 @@ def create_expense(): amount=amount, currency=(data.get("currency") or (user.preferred_currency if user else "INR")), expense_type=str(data.get("expense_type") or "EXPENSE").upper(), - category_id=data.get("category_id"), + category_id=category_id, notes=description, - spent_at=date.fromisoformat(raw_date) if raw_date else date.today(), + spent_at=spent_at or date.today(), ) db.session.add(e) db.session.commit() @@ -105,11 +118,20 @@ def list_recurring_expenses(): def create_recurring_expense(): uid = int(get_jwt_identity()) user = db.session.get(User, uid) - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 amount = _parse_amount(data.get("amount")) if amount is None: return jsonify(error="invalid amount"), 400 - description = (data.get("description") or data.get("notes") or "").strip() + category_id = _parse_category_id(data.get("category_id")) + if ( + "category_id" in data + and data.get("category_id") not in (None, "", "null") + and category_id is None + ): + return jsonify(error="invalid category_id"), 400 + description = str(data.get("description") or data.get("notes") or "").strip() if not description: return jsonify(error="description required"), 400 cadence = _parse_recurring_cadence(data.get("cadence")) @@ -119,20 +141,20 @@ def 
create_recurring_expense(): if not start_raw: return jsonify(error="start_date required"), 400 try: - start_date = date.fromisoformat(start_raw) - except ValueError: + start_date = date.fromisoformat(str(start_raw)) + except (TypeError, ValueError): return jsonify(error="invalid start_date"), 400 end_date = None if data.get("end_date"): try: - end_date = date.fromisoformat(data.get("end_date")) - except ValueError: + end_date = date.fromisoformat(str(data.get("end_date"))) + except (TypeError, ValueError): return jsonify(error="invalid end_date"), 400 if end_date < start_date: return jsonify(error="end_date must be on or after start_date"), 400 recurring = RecurringExpense( user_id=uid, - category_id=data.get("category_id"), + category_id=category_id, amount=amount, currency=(data.get("currency") or (user.preferred_currency if user else "INR")), expense_type=str(data.get("expense_type") or "EXPENSE").upper(), @@ -153,13 +175,15 @@ def generate_recurring_expenses(recurring_id: int): recurring = db.session.get(RecurringExpense, recurring_id) if not recurring or recurring.user_id != uid: return jsonify(error="not found"), 404 - payload = request.get_json() or {} + payload = get_json_object() + if payload is None: + return jsonify(error="json body must be an object"), 400 through_raw = payload.get("through_date") if not through_raw: return jsonify(error="through_date required"), 400 try: - through_date = date.fromisoformat(through_raw) - except ValueError: + through_date = date.fromisoformat(str(through_raw)) + except (TypeError, ValueError): return jsonify(error="invalid through_date"), 400 window_end = through_date if recurring.end_date and recurring.end_date < window_end: @@ -209,7 +233,9 @@ def update_expense(expense_id: int): e = db.session.get(Expense, expense_id) if not e or e.user_id != uid: return jsonify(error="not found"), 404 - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an 
object"), 400 if "amount" in data: amount = _parse_amount(data.get("amount")) if amount is None: @@ -220,15 +246,21 @@ def update_expense(expense_id: int): if "expense_type" in data: e.expense_type = str(data.get("expense_type") or "EXPENSE").upper() if "category_id" in data: - e.category_id = data.get("category_id") + category_id = _parse_category_id(data.get("category_id")) + if data.get("category_id") not in (None, "", "null") and category_id is None: + return jsonify(error="invalid category_id"), 400 + e.category_id = category_id if "description" in data or "notes" in data: - description = (data.get("description") or data.get("notes") or "").strip() + description = str(data.get("description") or data.get("notes") or "").strip() if not description: return jsonify(error="description required"), 400 e.notes = description if "date" in data or "spent_at" in data: raw_date = data.get("date") or data.get("spent_at") - e.spent_at = date.fromisoformat(raw_date) + spent_at = _parse_date(raw_date) + if spent_at is None: + return jsonify(error="invalid date"), 400 + e.spent_at = spent_at db.session.commit() _invalidate_expense_cache(uid, e.spent_at.isoformat()) return jsonify(_expense_to_dict(e)) @@ -267,9 +299,9 @@ def import_preview(): transactions = expense_import.normalize_import_rows(rows) except ValueError as exc: return jsonify(error=str(exc)), 400 - except Exception as exc: # pragma: no cover + except Exception: # pragma: no cover logger.exception("Import preview failed user=%s", uid) - return jsonify(error=f"failed to parse statement: {exc}"), 500 + return jsonify(error="failed to parse statement"), 500 duplicates = sum(1 for t in transactions if _is_duplicate(uid, t)) return jsonify( total=len(transactions), duplicates=duplicates, transactions=transactions @@ -281,11 +313,18 @@ def import_preview(): def import_commit(): uid = int(get_jwt_identity()) user = db.session.get(User, uid) - data = request.get_json() or {} + data = get_json_object() + if data is None: + 
return jsonify(error="json body must be an object"), 400 rows = data.get("transactions") or [] if not isinstance(rows, list) or not rows: return jsonify(error="transactions required"), 400 - transactions = expense_import.normalize_import_rows(rows) + try: + transactions = expense_import.normalize_import_rows(rows) + except ValueError as exc: + return jsonify(error=str(exc)), 400 + if not transactions: + return jsonify(error="no valid transactions"), 400 inserted = 0 duplicates = 0 touched_months: set[str] = set() @@ -345,6 +384,27 @@ def _parse_amount(raw) -> Decimal | None: return None +def _parse_date(raw) -> date | None: + if not raw: + return None + try: + return date.fromisoformat(str(raw)) + except ValueError: + return None + + +def _parse_category_id(raw) -> int | None: + if raw in (None, "", "null"): + return None + try: + category_id = int(raw) + except (TypeError, ValueError): + return None + if category_id <= 0: + return None + return category_id + + def _parse_recurring_cadence(raw: str | None) -> str | None: val = str(raw or "").upper().strip() if val in {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}: diff --git a/packages/backend/app/routes/insights.py b/packages/backend/app/routes/insights.py index bfc02e43..1057b4fc 100644 --- a/packages/backend/app/routes/insights.py +++ b/packages/backend/app/routes/insights.py @@ -13,6 +13,8 @@ def budget_suggestion(): uid = int(get_jwt_identity()) ym = (request.args.get("month") or date.today().strftime("%Y-%m")).strip() + if not _is_valid_month(ym): + return jsonify(error="invalid month, expected YYYY-MM"), 400 user_gemini_key = (request.headers.get("X-Gemini-Api-Key") or "").strip() or None persona = (request.headers.get("X-Insight-Persona") or "").strip() or None suggestion = monthly_budget_suggestion( @@ -23,3 +25,12 @@ def budget_suggestion(): ) logger.info("Budget suggestion served user=%s month=%s", uid, ym) return jsonify(suggestion) + + +def _is_valid_month(ym: str) -> bool: + if len(ym) != 7 or ym[4] != "-": 
+ return False + year, month = ym.split("-") + if not (year.isdigit() and month.isdigit()): + return False + return 1 <= int(month) <= 12 diff --git a/packages/backend/app/routes/reminders.py b/packages/backend/app/routes/reminders.py index 9ed7ea50..ee6f9967 100644 --- a/packages/backend/app/routes/reminders.py +++ b/packages/backend/app/routes/reminders.py @@ -1,9 +1,10 @@ from datetime import datetime, time, timedelta -from flask import Blueprint, jsonify, request +from flask import Blueprint, jsonify from flask_jwt_extended import jwt_required, get_jwt_identity from ..extensions import db from ..models import Bill, Reminder from ..observability import track_reminder_event +from ..request_utils import get_json_object from ..services.reminders import send_reminder import logging @@ -40,11 +41,19 @@ def list_reminders(): @jwt_required() def create_reminder(): uid = int(get_jwt_identity()) - data = request.get_json() or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 + message = str(data.get("message") or "").strip() + if not message: + return jsonify(error="message required"), 400 + send_at = _parse_send_at(data.get("send_at")) + if send_at is None: + return jsonify(error="invalid send_at"), 400 r = Reminder( user_id=uid, - message=data["message"], - send_at=datetime.fromisoformat(data["send_at"]), + message=message, + send_at=send_at, channel=data.get("channel", "email"), ) db.session.add(r) @@ -61,7 +70,9 @@ def schedule_bill_reminders(bill_id: int): bill = db.session.get(Bill, bill_id) if not bill or bill.user_id != uid: return jsonify(error="not found"), 404 - data = request.get_json(silent=True) or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 offsets = data.get("offsets_days") if offsets is None: offsets = [7, 3, 1] @@ -125,7 +136,9 @@ def autopay_result_followup(bill_id: int): bill = db.session.get(Bill, bill_id) if not bill or 
bill.user_id != uid: return jsonify(error="not found"), 404 - data = request.get_json(silent=True) or {} + data = get_json_object() + if data is None: + return jsonify(error="json body must be an object"), 400 status = str(data.get("status") or "").upper().strip() if status not in {"SUCCESS", "FAILED"}: return jsonify(error="status must be SUCCESS or FAILED"), 400 @@ -221,3 +234,12 @@ def _create_reminder_if_missing( ) ) return True + + +def _parse_send_at(raw) -> datetime | None: + if not raw: + return None + try: + return datetime.fromisoformat(str(raw)) + except ValueError: + return None diff --git a/packages/backend/app/services/expense_import.py b/packages/backend/app/services/expense_import.py index 712ad62e..c7014d25 100644 --- a/packages/backend/app/services/expense_import.py +++ b/packages/backend/app/services/expense_import.py @@ -45,14 +45,15 @@ def extract_transactions_from_statement( def normalize_import_rows(rows: list[dict[str, Any]]) -> list[dict[str, Any]]: normalized: list[dict[str, Any]] = [] for row in rows: + if not isinstance(row, dict): + raise ValueError("invalid transaction payload") dt = _normalize_date(row.get("date")) amt = _normalize_amount(row.get("amount")) desc = str(row.get("description") or "").strip() if not dt or amt is None or not desc: continue expense_type = _infer_expense_type(row.get("expense_type"), desc, amt) - cid = row.get("category_id") - category_id = int(cid) if cid not in (None, "", "null") else None + category_id = _normalize_category_id(row.get("category_id")) normalized.append( { "date": dt, @@ -66,6 +67,16 @@ def normalize_import_rows(rows: list[dict[str, Any]]) -> list[dict[str, Any]]: return normalized +def _normalize_category_id(value: Any) -> int | None: + if value in (None, "", "null"): + return None + try: + category_id = int(value) + except (TypeError, ValueError) as exc: + raise ValueError("invalid category_id") from exc + return category_id if category_id > 0 else None + + def _parse_csv_rows(data: 
bytes) -> list[dict[str, Any]]: text = data.decode("utf-8-sig", errors="ignore") reader = csv.DictReader(io.StringIO(text)) diff --git a/packages/backend/docker-entrypoint.sh b/packages/backend/docker-entrypoint.sh new file mode 100755 index 00000000..db9380d4 --- /dev/null +++ b/packages/backend/docker-entrypoint.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh +set -eu + +if [ "${FINMIND_RUN_INIT_DB_ON_BOOT:-0}" = "1" ]; then + python -m flask --app wsgi:app init-db +fi + +export PROMETHEUS_MULTIPROC_DIR="${PROMETHEUS_MULTIPROC_DIR:-/tmp/prometheus_multiproc}" +rm -rf "$PROMETHEUS_MULTIPROC_DIR" +mkdir -p "$PROMETHEUS_MULTIPROC_DIR" + +if [ "$#" -gt 0 ]; then + exec "$@" +fi + +exec gunicorn --workers=2 --threads=4 --bind 0.0.0.0:8000 wsgi:app diff --git a/packages/backend/tests/test_auth.py b/packages/backend/tests/test_auth.py index 7b22b0e3..6020d685 100644 --- a/packages/backend/tests/test_auth.py +++ b/packages/backend/tests/test_auth.py @@ -8,6 +8,8 @@ def test_auth_refresh_flow(client): # Login to get tokens r = client.post("/auth/login", json={"email": email, "password": password}) assert r.status_code == 200 + assert r.headers["Cache-Control"] == "no-store" + assert r.headers["Pragma"] == "no-cache" data = r.get_json() assert "access_token" in data and "refresh_token" in data @@ -17,6 +19,7 @@ def test_auth_refresh_flow(client): "/auth/refresh", headers={"Authorization": f"Bearer {refresh_token}"} ) assert r.status_code == 200 + assert r.headers["Cache-Control"] == "no-store" new_access = r.get_json().get("access_token") assert isinstance(new_access, str) and len(new_access) > 10 @@ -66,3 +69,13 @@ def test_auth_me_and_update_preferred_currency(client): r = client.patch("/auth/me", json={"preferred_currency": "ZZZ"}, headers=auth) assert r.status_code == 400 + + +def test_auth_rejects_non_object_json_body(client): + r = client.post( + "/auth/register", + data='["bad"]', + content_type="application/json", + ) + assert r.status_code == 400 + assert r.get_json() == 
{"error": "json body must be an object"} diff --git a/packages/backend/tests/test_bills.py b/packages/backend/tests/test_bills.py index 9d7f585a..544371ec 100644 --- a/packages/backend/tests/test_bills.py +++ b/packages/backend/tests/test_bills.py @@ -54,3 +54,52 @@ def test_bill_create_defaults_to_user_preferred_currency(client, auth_header): created = next((item for item in r.get_json() if item["id"] == bill_id), None) assert created is not None assert created["currency"] == "INR" + + +def test_bill_create_rejects_invalid_input_with_400(client, auth_header): + r = client.post( + "/bills", + json={"name": "", "amount": "oops", "next_due_date": "2026-02-31"}, + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json()["error"] in { + "name required", + "invalid amount", + "invalid next_due_date", + } + + +def test_bill_create_rejects_non_object_json_body(client, auth_header): + r = client.post( + "/bills", + data='["bad"]', + content_type="application/json", + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json() == {"error": "json body must be an object"} + + +def test_bill_create_normalizes_string_boolean_flags(client, auth_header): + r = client.post( + "/bills", + json={ + "name": "Utilities", + "amount": 22.0, + "next_due_date": date.today().isoformat(), + "cadence": "MONTHLY", + "autopay_enabled": "false", + "channel_email": "false", + "channel_whatsapp": "true", + }, + headers=auth_header, + ) + assert r.status_code == 201 + bill_id = r.get_json()["id"] + + r = client.get("/bills", headers=auth_header) + created = next(item for item in r.get_json() if item["id"] == bill_id) + assert created["autopay_enabled"] is False + assert created["channel_email"] is False + assert created["channel_whatsapp"] is True diff --git a/packages/backend/tests/test_categories.py b/packages/backend/tests/test_categories.py index e0d3b3cc..f098fc22 100644 --- a/packages/backend/tests/test_categories.py +++ 
b/packages/backend/tests/test_categories.py @@ -36,3 +36,14 @@ def test_categories_crud_flow(client, auth_header): r = client.get("/categories", headers=auth_header) assert r.status_code == 200 assert r.get_json() == [] + + +def test_categories_reject_non_object_json_body(client, auth_header): + r = client.post( + "/categories", + data='["bad"]', + content_type="application/json", + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json() == {"error": "json body must be an object"} diff --git a/packages/backend/tests/test_config.py b/packages/backend/tests/test_config.py new file mode 100644 index 00000000..29570fd4 --- /dev/null +++ b/packages/backend/tests/test_config.py @@ -0,0 +1,8 @@ +from app.config import Settings + + +def test_cors_origins_include_container_frontend_origin(): + cfg = Settings() + + assert "http://frontend" in cfg.cors_origins + assert "http://frontend:80" in cfg.cors_origins diff --git a/packages/backend/tests/test_expenses.py b/packages/backend/tests/test_expenses.py index fda27362..2a3887da 100644 --- a/packages/backend/tests/test_expenses.py +++ b/packages/backend/tests/test_expenses.py @@ -76,6 +76,29 @@ def test_expense_create_defaults_to_user_preferred_currency(client, auth_header) assert created["currency"] == "INR" +def test_expense_create_rejects_invalid_date_with_400(client, auth_header): + payload = { + "amount": 99.5, + "description": "Broken date payload", + "date": "2026-02-31", + } + r = client.post("/expenses", json=payload, headers=auth_header) + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid date"} + + +def test_expense_create_rejects_invalid_category_id(client, auth_header): + payload = { + "amount": 99.5, + "description": "Bad category payload", + "date": "2026-02-12", + "category_id": "oops", + } + r = client.post("/expenses", json=payload, headers=auth_header) + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid category_id"} + + def 
test_expense_import_preview_and_commit_prevents_duplicates(client, auth_header): cat_id = _create_category(client, auth_header) @@ -185,6 +208,86 @@ def test_expense_import_preview_pdf_fallback_without_gemini( assert tx[1]["expense_type"] == "INCOME" +def test_expense_import_preview_hides_internal_errors(client, auth_header, monkeypatch): + monkeypatch.setattr( + "app.services.expense_import.extract_transactions_from_statement", + lambda **_kwargs: (_ for _ in ()).throw(RuntimeError("provider blew up")), + ) + + data = {"file": (BytesIO(b"%PDF-1.4 fake"), "statement.pdf")} + r = client.post( + "/expenses/import/preview", + data=data, + content_type="multipart/form-data", + headers=auth_header, + ) + + assert r.status_code == 500 + assert r.get_json() == {"error": "failed to parse statement"} + + +def test_expense_import_commit_rejects_invalid_category_id(client, auth_header): + r = client.post( + "/expenses/import/commit", + json={ + "transactions": [ + { + "date": "2026-02-10", + "amount": 10.5, + "description": "Coffee", + "category_id": "oops", + } + ] + }, + headers=auth_header, + ) + + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid category_id"} + + +def test_expense_import_preview_rejects_oversized_upload(client, auth_header): + client.application.config["MAX_CONTENT_LENGTH"] = 128 + + data = {"file": (BytesIO(b"x" * 2048), "statement.csv")} + r = client.post( + "/expenses/import/preview", + data=data, + content_type="multipart/form-data", + headers=auth_header, + ) + + assert r.status_code == 413 + assert r.get_json() == {"error": "upload too large"} + + +def test_expense_import_commit_rejects_non_object_transactions(client, auth_header): + r = client.post( + "/expenses/import/commit", + json={"transactions": ["bad-row"]}, + headers=auth_header, + ) + + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid transaction payload"} + + +def test_recurring_expense_create_rejects_invalid_category_id(client, auth_header): 
+ r = client.post( + "/expenses/recurring", + json={ + "amount": 40.0, + "description": "Subscription", + "cadence": "MONTHLY", + "start_date": "2026-01-01", + "category_id": "oops", + }, + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid category_id"} + + def test_recurring_expense_create_list_and_generate(client, auth_header): cat_id = _create_category(client, auth_header, name="Rent") diff --git a/packages/backend/tests/test_health.py b/packages/backend/tests/test_health.py new file mode 100644 index 00000000..ff5ab96d --- /dev/null +++ b/packages/backend/tests/test_health.py @@ -0,0 +1,86 @@ +import app as app_module + + +def test_health_ready_reports_connected_dependencies(client, monkeypatch): + monkeypatch.setattr(app_module.db.session, "execute", lambda _query: None) + monkeypatch.setattr(app_module.redis_client, "ping", lambda: True) + + response = client.get("/health/ready") + + assert response.status_code == 200 + assert response.get_json() == { + "status": "ok", + "checks": {"database": "connected", "redis": "connected"}, + } + + +def test_health_ready_returns_503_when_database_check_fails(client, monkeypatch): + def fail_database(_query): + raise RuntimeError("db down") + + monkeypatch.setattr(app_module.db.session, "execute", fail_database) + monkeypatch.setattr(app_module.redis_client, "ping", lambda: True) + + response = client.get("/health/ready") + + assert response.status_code == 503 + assert response.get_json() == { + "status": "error", + "checks": {"database": "error", "redis": "connected"}, + } + + +def test_health_ready_returns_503_when_redis_check_fails(client, monkeypatch): + def fail_redis(): + raise RuntimeError("redis down") + + monkeypatch.setattr(app_module.db.session, "execute", lambda _query: None) + monkeypatch.setattr(app_module.redis_client, "ping", fail_redis) + + response = client.get("/health/ready") + + assert response.status_code == 503 + assert response.get_json() == { + 
"status": "error", + "checks": {"database": "connected", "redis": "error"}, + } + + +def test_health_uses_explicit_cors_origins(client): + response = client.get("/health", headers={"Origin": "http://localhost:8081"}) + + assert response.status_code == 200 + assert response.headers["Access-Control-Allow-Origin"] == "http://localhost:8081" + assert "Access-Control-Allow-Credentials" not in response.headers + + +def test_health_sets_default_security_headers(client): + response = client.get("/health") + + assert response.status_code == 200 + assert response.headers["X-Content-Type-Options"] == "nosniff" + assert response.headers["X-Frame-Options"] == "DENY" + assert response.headers["Referrer-Policy"] == "strict-origin-when-cross-origin" + + +def test_schema_compatibility_patch_is_skipped_when_database_is_unreachable( + app_fixture, monkeypatch, caplog +): + class FakeEngine: + class dialect: + name = "postgresql" + + def raw_connection(self): + raise RuntimeError("database unavailable") + + monkeypatch.setattr( + type(app_module.db), "engine", property(lambda _self: FakeEngine()) + ) + + with app_fixture.app_context(): + with caplog.at_level("WARNING"): + app_module._ensure_schema_compatibility(app_fixture) + + assert ( + "Skipping schema compatibility patch until database is reachable" in caplog.text + ) diff --git a/packages/backend/tests/test_insights.py b/packages/backend/tests/test_insights.py index 84f1d4ba..5700a177 100644 --- a/packages/backend/tests/test_insights.py +++ b/packages/backend/tests/test_insights.py @@ -90,3 +90,12 @@ def _boom(*_args, **_kwargs): assert payload["method"] == "heuristic" assert "warnings" in payload assert "gemini_unavailable" in payload["warnings"] + + +def test_budget_suggestion_rejects_invalid_month_with_400(client, auth_header): + r = client.get( + "/insights/budget-suggestion?month=2026-13", + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json() == {"error": "invalid month, expected YYYY-MM"} diff --git 
a/packages/backend/tests/test_reminders.py b/packages/backend/tests/test_reminders.py index 4bfe9f3f..9c5396b5 100644 --- a/packages/backend/tests/test_reminders.py +++ b/packages/backend/tests/test_reminders.py @@ -80,3 +80,27 @@ def test_autopay_generates_precheck_and_result_followup_for_both_channels( followups = [x for x in reminders if "Autopay succeeded" in x["message"]] assert len(followups) == 2 assert sorted([x["channel"] for x in followups]) == ["email", "whatsapp"] + + +def test_create_reminder_rejects_invalid_payload_with_400(client, auth_header): + r = client.post( + "/reminders", + json={"message": "", "send_at": "not-a-date"}, + headers=auth_header, + ) + assert r.status_code == 400 + assert r.get_json()["error"] in {"message required", "invalid send_at"} + + +def test_bill_reminder_schedule_rejects_non_object_json_body(client, auth_header): + bill_id = _create_bill(client, auth_header, due_date="2026-03-20") + + r = client.post( + f"/reminders/bills/{bill_id}/schedule", + data='["bad"]', + content_type="application/json", + headers=auth_header, + ) + + assert r.status_code == 400 + assert r.get_json() == {"error": "json body must be an object"} diff --git a/scripts/build-demo-video.sh b/scripts/build-demo-video.sh new file mode 100755 index 00000000..90a29964 --- /dev/null +++ b/scripts/build-demo-video.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env sh +set -eu + +ARTIFACT_DIR="${1:-tmp/demo_build}" +OUTPUT_FILE="${2:-docs/demo/finmind-deploy-demo.mp4}" +WORK_DIR="${ARTIFACT_DIR}/video_work" + +LOGIN_IMAGE="docs/demo/review-login.png" +DASHBOARD_IMAGE="docs/demo/review-dashboard.png" +FONT_TITLE="/System/Library/Fonts/Supplemental/Arial Bold.ttf" +FONT_BODY="/System/Library/Fonts/SFNSMono.ttf" + +mkdir -p "$WORK_DIR" + +if [ ! -f "$ARTIFACT_DIR/compose-ps.txt" ] || [ ! -f "$ARTIFACT_DIR/review-checks.txt" ]; then + echo "Missing review artifact text files in $ARTIFACT_DIR" >&2 + exit 1 +fi + +if [ ! -f "$LOGIN_IMAGE" ] || [ ! 
-f "$DASHBOARD_IMAGE" ]; then + echo "Missing demo screenshots under docs/demo/" >&2 + exit 1 +fi + +python3 - "$ARTIFACT_DIR" "$WORK_DIR" "$LOGIN_IMAGE" "$DASHBOARD_IMAGE" "$FONT_TITLE" "$FONT_BODY" <<'PY' +from pathlib import Path +import sys +from PIL import Image, ImageDraw, ImageFont + + +artifact_dir = Path(sys.argv[1]) +work_dir = Path(sys.argv[2]) +login_image = Path(sys.argv[3]) +dashboard_image = Path(sys.argv[4]) +font_title = sys.argv[5] +font_body = sys.argv[6] + +W, H = 1280, 720 + + +def load_font(path: str, size: int) -> ImageFont.FreeTypeFont: + return ImageFont.truetype(path, size) + + +def draw_multiline(draw: ImageDraw.ImageDraw, text: str, font, fill, x: int, y: int, line_gap: int = 10): + cursor = y + for line in text.splitlines(): + draw.text((x, cursor), line, font=font, fill=fill) + cursor += font.size + line_gap + + +def render_card(name: str, title: str, body: str, *, bg: str, title_color: str = "#ffffff", body_color: str = "#dbeafe"): + image = Image.new("RGB", (W, H), bg) + draw = ImageDraw.Draw(image) + title_font = load_font(font_title, 42) + body_font = load_font(font_body, 24) + draw.text((72, 54), title, font=title_font, fill=title_color) + draw_multiline(draw, body, body_font, body_color, 72, 140, 12) + image.save(work_dir / name) + + +def render_labeled_screenshot(source: Path, name: str, title: str): + base = Image.new("RGB", (W, H), "#06101b") + shot = Image.open(source).convert("RGB") + shot.thumbnail((W - 120, H - 160)) + x = (W - shot.width) // 2 + y = 100 + (H - 140 - shot.height) // 2 + base.paste(shot, (x, y)) + draw = ImageDraw.Draw(base) + title_font = load_font(font_title, 30) + draw.rectangle((0, 0, W, 84), fill="#06101b") + draw.text((60, 24), title, font=title_font, fill="#ffffff") + base.save(work_dir / name) + + +compose_body = (artifact_dir / "compose-ps.txt").read_text(encoding="utf-8").strip() +checks_body = (artifact_dir / "review-checks.txt").read_text(encoding="utf-8").strip() + +render_card( + 
"01-title.png", + "FinMind deployment review path", + "One-command verification, explicit dependency readiness,\nand a downloadable review artifact bundle for maintainer re-checks.", + bg="#09111f", +) +render_card( + "02-compose.png", + "Healthy services snapshot", + compose_body, + bg="#0f172a", +) +render_card( + "03-checks.png", + "Readiness and observability checks", + checks_body, + bg="#111827", + body_color="#d1fae5", +) +render_labeled_screenshot(login_image, "04-login.png", "Login flow available on the deployed app") +render_labeled_screenshot(dashboard_image, "05-dashboard.png", "Dashboard state after successful auth") +render_card( + "06-free-tier.png", + "Free-tier deployment entrypoints included", + "Render: render.yaml\nNetlify: netlify.toml\nVercel: vercel.json\nMaintainer quick path: ./scripts/review-deploy.sh", + bg="#071521", + body_color="#fef3c7", +) +PY + +ffmpeg -y -loop 1 -t 4 -i "$WORK_DIR/01-title.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/01-title.mp4" >/dev/null 2>&1 +ffmpeg -y -loop 1 -t 8 -i "$WORK_DIR/02-compose.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/02-compose.mp4" >/dev/null 2>&1 +ffmpeg -y -loop 1 -t 8 -i "$WORK_DIR/03-checks.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/03-checks.mp4" >/dev/null 2>&1 +ffmpeg -y -loop 1 -t 6 -i "$WORK_DIR/04-login.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/04-login.mp4" >/dev/null 2>&1 +ffmpeg -y -loop 1 -t 8 -i "$WORK_DIR/05-dashboard.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/05-dashboard.mp4" >/dev/null 2>&1 +ffmpeg -y -loop 1 -t 6 -i "$WORK_DIR/06-free-tier.png" -c:v libx264 -pix_fmt yuv420p "$WORK_DIR/06-free-tier.mp4" >/dev/null 2>&1 + +cat > "$WORK_DIR/concat.txt" </dev/null 2>&1 + +printf '%s\n' "Updated demo video: $OUTPUT_FILE" diff --git a/scripts/deploy-prod.ps1 b/scripts/deploy-prod.ps1 new file mode 100644 index 00000000..e31d0997 --- /dev/null +++ b/scripts/deploy-prod.ps1 @@ -0,0 +1,14 @@ +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +if (-not 
(Test-Path .env)) { + throw ".env not found. Copy .env.example to .env and fill the required secrets first." +} + +if ($env:FINMIND_COMPOSE_PROFILES) { + $env:COMPOSE_PROFILES = $env:FINMIND_COMPOSE_PROFILES +} elseif ($env:COMPOSE_PROFILES) { + $env:COMPOSE_PROFILES = $env:COMPOSE_PROFILES +} + +docker compose -f docker-compose.prod.yml up -d --build diff --git a/scripts/deploy-prod.sh b/scripts/deploy-prod.sh new file mode 100755 index 00000000..2198756a --- /dev/null +++ b/scripts/deploy-prod.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env sh +set -eu + +if [ ! -f .env ]; then + echo ".env not found. Copy .env.example to .env and fill the required secrets first." >&2 + exit 1 +fi + +if [ -n "${FINMIND_COMPOSE_PROFILES:-${COMPOSE_PROFILES:-}}" ]; then + export COMPOSE_PROFILES="${FINMIND_COMPOSE_PROFILES:-${COMPOSE_PROFILES:-}}" +fi + +docker compose -f docker-compose.prod.yml up -d --build diff --git a/scripts/record-browser-demo.mjs b/scripts/record-browser-demo.mjs new file mode 100755 index 00000000..076dd63b --- /dev/null +++ b/scripts/record-browser-demo.mjs @@ -0,0 +1,28 @@ +import { runUiValidation } from './validate-ui.mjs'; + +const [ + outputPathArg, + baseUrlArg = 'http://127.0.0.1:8081', + healthUrlArg = 'http://127.0.0.1:8000/health/ready', +] = process.argv.slice(2); + +if (!outputPathArg) { + console.error('Usage: node scripts/record-browser-demo.mjs [base-url] [health-url]'); + process.exit(1); +} + +runUiValidation({ + baseUrl: baseUrlArg, + healthUrl: healthUrlArg, + providerName: 'demo recording', + recordVideoPath: outputPathArg, +}) + .then((recordedPath) => { + if (recordedPath) { + console.log(recordedPath); + } + }) + .catch((error) => { + console.error(error); + process.exit(1); + }); diff --git a/scripts/record-demo-video.sh b/scripts/record-demo-video.sh new file mode 100755 index 00000000..7e06f11a --- /dev/null +++ b/scripts/record-demo-video.sh @@ -0,0 +1,89 @@ +#!/usr/bin/env sh +set -eu + 
+COMPOSE_FILE="${FINMIND_COMPOSE_FILE:-docker-compose.prod.yml}" +OUTPUT_FILE="${1:-docs/demo/finmind-deploy-demo.mp4}" +WORK_DIR="${2:-tmp/demo_recording}" +PLAYWRIGHT_IMAGE="${PLAYWRIGHT_IMAGE:-mcr.microsoft.com/playwright:v1.55.0-noble}" +DEMO_API_URL="${FINMIND_DEMO_API_URL:-http://backend:8000}" +FRONTEND_ENV_PATH="/usr/share/nginx/html/env.js" +FRONTEND_ENV_BACKUP="/tmp/finmind-demo-env.js.bak" +EXPORT_SCREENSHOTS="${FINMIND_EXPORT_SCREENSHOTS:-0}" +DEMO_STAMP="${FINMIND_DEMO_STAMP:-$(date +%Y%m%d)}" +DEMO_SHA="${FINMIND_DEMO_SHA:-$(git rev-parse --short HEAD)}" +SCREENSHOT_WORK_DIR="$WORK_DIR/screenshots" + +mkdir -p "$WORK_DIR" +RAW_VIDEO="$WORK_DIR/finmind-browser-demo.webm" + +docker compose -f "$COMPOSE_FILE" up -d --build backend frontend >/dev/null + +NETWORK_NAME="$( + docker inspect "$(docker compose -f "$COMPOSE_FILE" ps -q frontend)" \ + --format '{{range $name, $_ := .NetworkSettings.Networks}}{{println $name}}{{end}}' \ + | head -n 1 +)" + +if [ -z "$NETWORK_NAME" ]; then + echo "Could not determine the Compose network for the frontend service." 
>&2 + exit 1 +fi + +restore_frontend_env() { + docker compose -f "$COMPOSE_FILE" exec -T frontend sh -lc " + if [ -f '$FRONTEND_ENV_BACKUP' ]; then + cp '$FRONTEND_ENV_BACKUP' '$FRONTEND_ENV_PATH' + rm -f '$FRONTEND_ENV_BACKUP' + fi + " >/dev/null 2>&1 || true +} + +trap restore_frontend_env EXIT HUP INT TERM + +docker compose -f "$COMPOSE_FILE" exec -T frontend sh -lc " + cp '$FRONTEND_ENV_PATH' '$FRONTEND_ENV_BACKUP' + cat > '$FRONTEND_ENV_PATH' <<'EOF' +window.__FINMIND_API_URL__ = \"${DEMO_API_URL}\"; +EOF +" >/dev/null + +docker run --rm \ + --ipc=host \ + --network "$NETWORK_NAME" \ + -v "$PWD":/work \ + -w /work \ + "$PLAYWRIGHT_IMAGE" \ + sh -lc ' + mkdir -p /tmp/finmind-playwright && + cd /tmp/finmind-playwright && + npm init -y >/dev/null 2>&1 && + npm install --silent playwright@1.55.0 >/dev/null 2>&1 && + cp /work/scripts/validate-ui.mjs ./validate-ui.mjs && + node ./validate-ui.mjs \ + --base-url http://frontend \ + --health-url http://backend:8000/health/ready \ + --provider-name "local compose review" \ + --record-video "/work/'"$RAW_VIDEO"'" \ + --screenshot-dir "/work/'"$SCREENSHOT_WORK_DIR"'" + ' + +ffmpeg -y \ + -i "$RAW_VIDEO" \ + -vf "scale=1280:-2,fps=30" \ + -c:v libx264 \ + -preset slow \ + -crf 26 \ + -pix_fmt yuv420p \ + -movflags +faststart \ + "$OUTPUT_FILE" >/dev/null 2>&1 + +printf '%s\n' "Recorded dynamic demo video: $OUTPUT_FILE" + +if [ "$EXPORT_SCREENSHOTS" = "1" ]; then + for name in readiness signup bills expenses analytics; do + cp \ + "$SCREENSHOT_WORK_DIR/${name}.png" \ + "docs/demo/${DEMO_STAMP}-${DEMO_SHA}-${name}.png" + done + printf '%s\n' "Exported screenshots: docs/demo/${DEMO_STAMP}-${DEMO_SHA}-{readiness,signup,bills,expenses,analytics}.png" +fi diff --git a/scripts/review-deploy.sh b/scripts/review-deploy.sh new file mode 100755 index 00000000..3d02166c --- /dev/null +++ b/scripts/review-deploy.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env sh +set -eu + +COMPOSE_FILE="docker-compose.prod.yml" 
KEEP_RUNNING="${FINMIND_REVIEW_KEEP_RUNNING:-0}"
CREATED_ENV=0

# Print the first Docker network attached to the running backend container,
# so helper containers can join the same compose network.
network_name() {
  docker inspect "$(docker compose -f "$COMPOSE_FILE" ps -q backend)" \
    --format '{{range $name, $_ := .NetworkSettings.Networks}}{{println $name}}{{end}}' \
    | head -n 1
}

# Tear the review stack down unless the caller asked to keep it running.
# The generated .env is only deleted together with the stack: if the stack
# is kept alive it still needs .env for follow-up docker compose commands
# (previously the file was removed unconditionally, stranding a kept stack).
cleanup() {
  if [ "$KEEP_RUNNING" != "1" ]; then
    docker compose -f "$COMPOSE_FILE" --profile observability down -v >/dev/null 2>&1 || true
    if [ "$CREATED_ENV" = "1" ]; then
      rm -f .env
    fi
  fi
}

trap cleanup EXIT INT TERM

# Seed a throwaway .env from the example file when none exists locally.
if [ ! -f .env ]; then
  cp .env.example .env
  CREATED_ENV=1
fi

export COMPOSE_PROFILES=observability

docker compose -f "$COMPOSE_FILE" --profile observability up -d --build

NETWORK_NAME="$(network_name)"

# End-to-end API + frontend smoke test, executed from inside the compose
# network so service hostnames (backend, frontend) resolve.
docker run --rm \
  --network "$NETWORK_NAME" \
  -v "$PWD":/work \
  -w /work \
  python:3.11-slim \
  python scripts/smoke-deploy.py \
  --api-base-url http://backend:8000 \
  --frontend-url http://frontend:80

# Observability checks: Prometheus readiness text and Grafana health JSON.
docker run --rm \
  --network "$NETWORK_NAME" \
  python:3.11-slim \
  python - <<'PY'
import json
import urllib.request


def check(url, *, contains=None):
    with urllib.request.urlopen(url, timeout=20) as response:
        body = response.read().decode("utf-8")
    if contains and contains not in body:
        raise SystemExit(f"{url} did not contain {contains!r}")
    return body


check("http://prometheus:9090/-/ready", contains="Prometheus Server is Ready")
grafana = json.loads(check("http://grafana:3000/api/health"))
if grafana.get("database") != "ok":
    raise SystemExit(f"Grafana health was not ok: {grafana}")
PY

# Helm chart sanity: lint, then render the templates so key objects can be
# grepped for below.
docker run --rm \
  -v "$PWD":/work \
  -w /work \
  alpine/helm:3.16.2 \
  lint deploy/helm/finmind

docker run --rm \
  -v "$PWD":/work \
  -w /work \
  alpine/helm:3.16.2 \
  template finmind deploy/helm/finmind > /tmp/finmind-helm-review.yaml

grep -q "kind: Ingress" /tmp/finmind-helm-review.yaml
grep -q "path: /health/ready" /tmp/finmind-helm-review.yaml
grep -q "name: finmind-prometheus" /tmp/finmind-helm-review.yaml
+grep -q "name: finmind-grafana" /tmp/finmind-helm-review.yaml + +printf '%s\n' "FinMind deployment review passed" diff --git a/scripts/smoke-deploy.py b/scripts/smoke-deploy.py new file mode 100755 index 00000000..7d358a17 --- /dev/null +++ b/scripts/smoke-deploy.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python3 +import argparse +import json +import sys +import time +import urllib.error +import urllib.parse +import urllib.request +from datetime import date, timedelta +from typing import Any + + +def _request( + method: str, + url: str, + *, + payload: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + timeout: int = 15, +) -> tuple[int, str, dict[str, Any] | None]: + body = None + req_headers = {"Accept": "application/json"} + if payload is not None: + body = json.dumps(payload).encode("utf-8") + req_headers["Content-Type"] = "application/json" + if headers: + req_headers.update(headers) + + request = urllib.request.Request(url, data=body, headers=req_headers, method=method) + try: + with urllib.request.urlopen(request, timeout=timeout) as response: + text = response.read().decode("utf-8") + data = None + if "application/json" in (response.headers.get("Content-Type") or ""): + data = json.loads(text) + return response.status, text, data + except urllib.error.HTTPError as exc: + text = exc.read().decode("utf-8") + data = None + if "application/json" in (exc.headers.get("Content-Type") or ""): + try: + data = json.loads(text) + except json.JSONDecodeError: + data = None + return exc.code, text, data + + +def _wait_for(url: str, timeout: int = 180, interval: int = 3) -> None: + deadline = time.time() + timeout + while time.time() < deadline: + try: + status, _, _ = _request("GET", url, timeout=5) + if 200 <= status < 500: + return + except Exception: + pass + time.sleep(interval) + raise RuntimeError(f"Timed out waiting for {url}") + + +def _assert(condition: bool, message: str) -> None: + if not condition: + raise RuntimeError(message) + + +def 
main() -> int: + parser = argparse.ArgumentParser(description="Smoke test a FinMind deployment") + parser.add_argument("--api-base-url", default="http://127.0.0.1:8000") + parser.add_argument("--frontend-url", default="http://127.0.0.1:8081") + args = parser.parse_args() + + api = args.api_base_url.rstrip("/") + frontend = args.frontend_url.rstrip("/") + + _wait_for(f"{api}/health") + _wait_for(frontend) + + health_status, _, health_data = _request("GET", f"{api}/health") + _assert(health_status == 200, "backend health endpoint did not return 200") + _assert((health_data or {}).get("status") == "ok", "backend health payload invalid") + + ready_status, _, ready_data = _request("GET", f"{api}/health/ready") + _assert(ready_status == 200, "backend readiness endpoint did not return 200") + _assert((ready_data or {}).get("status") == "ok", "backend readiness payload invalid") + checks = (ready_data or {}).get("checks") or {} + _assert(checks.get("database") == "connected", "database readiness check failed") + _assert(checks.get("redis") == "connected", "redis readiness check failed") + + frontend_status, frontend_body, _ = _request("GET", frontend) + _assert(frontend_status == 200, "frontend root is not reachable") + _assert("FinMind" in frontend_body, "frontend root did not render FinMind shell") + + email = f"deploy-smoke-{int(time.time())}@example.com" + password = "SmokePassword123!" 
+ register_status, _, _ = _request( + "POST", + f"{api}/auth/register", + payload={"email": email, "password": password}, + ) + _assert(register_status in (200, 201), "register flow failed") + + login_status, _, login_data = _request( + "POST", + f"{api}/auth/login", + payload={"email": email, "password": password}, + ) + _assert(login_status == 200, "login flow failed") + access_token = (login_data or {}).get("access_token") + _assert(bool(access_token), "login did not return access token") + auth = {"Authorization": f"Bearer {access_token}"} + + me_status, _, me_data = _request("GET", f"{api}/auth/me", headers=auth) + _assert(me_status == 200, "auth/me failed") + _assert((me_data or {}).get("email") == email, "auth/me returned unexpected user") + + category_status, _, _ = _request( + "POST", + f"{api}/categories", + payload={"name": "Smoke Category"}, + headers=auth, + ) + _assert(category_status in (201, 409), "category create failed") + categories_status, _, categories_data = _request( + "GET", f"{api}/categories", headers=auth + ) + _assert(categories_status == 200, "category list failed") + categories = categories_data or [] + _assert(len(categories) >= 1, "category list empty after create") + category_id = categories[0]["id"] + + today = date.today() + due = today + timedelta(days=5) + current_month = today.strftime("%Y-%m") + + expense_status, _, expense_data = _request( + "POST", + f"{api}/expenses", + payload={ + "amount": 42.5, + "currency": "USD", + "category_id": category_id, + "description": "Smoke expense", + "date": today.isoformat(), + "expense_type": "EXPENSE", + }, + headers=auth, + ) + _assert(expense_status == 201, "expense create failed") + _assert( + (expense_data or {}).get("description") == "Smoke expense", + "expense payload mismatch", + ) + + bill_status, _, bill_data = _request( + "POST", + f"{api}/bills", + payload={ + "name": "Smoke bill", + "amount": 18.75, + "currency": "USD", + "next_due_date": due.isoformat(), + "cadence": 
"MONTHLY", + "channel_email": True, + "channel_whatsapp": False, + }, + headers=auth, + ) + _assert(bill_status == 201, "bill create failed") + bill_id = (bill_data or {}).get("id") + _assert(bool(bill_id), "bill id missing") + + reminders_status, _, reminders_data = _request( + "POST", + f"{api}/reminders/bills/{bill_id}/schedule", + headers=auth, + ) + _assert(reminders_status == 200, "bill reminder scheduling failed") + _assert( + (reminders_data or {}).get("created", -1) >= 0, + "reminder schedule payload invalid", + ) + + reminders_list_status, _, reminders_list_data = _request( + "GET", f"{api}/reminders", headers=auth + ) + _assert(reminders_list_status == 200, "reminder list failed") + _assert(isinstance(reminders_list_data, list), "reminders list payload invalid") + + dashboard_status, _, dashboard_data = _request( + "GET", + f"{api}/dashboard/summary?month={urllib.parse.quote(current_month)}", + headers=auth, + ) + _assert(dashboard_status == 200, "dashboard summary failed") + _assert("summary" in (dashboard_data or {}), "dashboard summary payload missing") + _assert( + isinstance((dashboard_data or {}).get("upcoming_bills"), list), + "dashboard upcoming bills missing", + ) + + insights_status, _, insights_data = _request( + "GET", + f"{api}/insights/budget-suggestion?month={urllib.parse.quote(current_month)}", + headers=auth, + ) + _assert(insights_status == 200, "insights endpoint failed") + _assert((insights_data or {}).get("month") == current_month, "insights month mismatch") + + print("FinMind deployment smoke check passed") + return 0 + + +if __name__ == "__main__": + try: + raise SystemExit(main()) + except Exception as exc: + print(f"Smoke check failed: {exc}", file=sys.stderr) + raise SystemExit(1) diff --git a/scripts/validate-deploy.sh b/scripts/validate-deploy.sh new file mode 100755 index 00000000..7145eccb --- /dev/null +++ b/scripts/validate-deploy.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env sh +set -eu + 
API_BASE_URL="${FINMIND_API_BASE_URL:-http://127.0.0.1:8000}"
FRONTEND_URL="${FINMIND_FRONTEND_URL:-http://127.0.0.1:8081}"

# Thin wrapper: delegate the actual checks to the Python smoke test against
# the configured endpoints (overridable via FINMIND_* environment variables).
python3 scripts/smoke-deploy.py \
  --api-base-url "$API_BASE_URL" \
  --frontend-url "$FRONTEND_URL"
diff --git a/scripts/validate-observability.sh b/scripts/validate-observability.sh
new file mode 100755
index 00000000..772d9d06
--- /dev/null
+++ b/scripts/validate-observability.sh
@@ -0,0 +1,58 @@
#!/usr/bin/env sh
set -eu

# Observability smoke check: wait for Prometheus and Grafana to come up, then
# verify that every required scrape job reports at least one healthy target.
PROMETHEUS_URL="${FINMIND_PROMETHEUS_URL:-http://127.0.0.1:9090}"
GRAFANA_URL="${FINMIND_GRAFANA_URL:-http://127.0.0.1:3000}"

python3 - "$PROMETHEUS_URL" "$GRAFANA_URL" <<'PY'
import json
import sys
import time
import urllib.error
import urllib.request

prometheus_url = sys.argv[1].rstrip("/")
grafana_url = sys.argv[2].rstrip("/")
# Scrape jobs that must report "up" for the stack to count as healthy.
required_jobs = {"backend", "postgres", "redis", "nginx"}

# Fetch *url* and parse the body as JSON; returns (HTTP status, parsed body).
def fetch_json(url: str, timeout: int = 10):
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return response.status, json.loads(response.read().decode("utf-8"))

# Fetch *url* and return (HTTP status, raw response text).
def fetch_text(url: str, timeout: int = 10):
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return response.status, response.read().decode("utf-8")

# Poll *check* until it returns a truthy value or *timeout* seconds elapse.
# On timeout the last exception seen is re-raised so failures show a real
# cause rather than the generic timeout message.
def wait_for(check, timeout: int = 120, interval: int = 3):
    deadline = time.time() + timeout
    last_error = None
    while time.time() < deadline:
        try:
            result = check()
            if result:
                return result
        except Exception as exc:
            last_error = exc
        time.sleep(interval)
    if last_error:
        raise last_error
    raise RuntimeError("Timed out waiting for observability stack")

wait_for(lambda: fetch_text(f"{prometheus_url}/-/ready")[0] == 200)
status, grafana = wait_for(lambda: fetch_json(f"{grafana_url}/api/health"))
if status != 200 or grafana.get("database") != "ok":
    raise RuntimeError(f"Grafana health invalid: {grafana}")

_, targets_payload = wait_for(lambda: fetch_json(f"{prometheus_url}/api/v1/targets"))
active_targets = targets_payload.get("data", {}).get("activeTargets", [])
# Collect the job label of every target Prometheus reports as "up".
healthy_jobs = {
    target.get("labels", {}).get("job")
    for target in active_targets
    if target.get("health") == "up"
}
missing = sorted(required_jobs - healthy_jobs)
if missing:
    raise RuntimeError(f"Prometheus targets not healthy: {', '.join(missing)}")

print("FinMind observability smoke check passed")
PY
diff --git a/scripts/validate-public-deployment.sh b/scripts/validate-public-deployment.sh
new file mode 100755
index 00000000..d094e91e
--- /dev/null
+++ b/scripts/validate-public-deployment.sh
@@ -0,0 +1,57 @@
#!/usr/bin/env sh
set -eu

# Validate an already-deployed public FinMind instance: drives the UI through
# Playwright in a container (presumably via scripts/validate-ui.mjs — the
# invocation is below this chunk; confirm against the full script).
FRONTEND_URL=""
API_BASE_URL=""
PROVIDER_NAME="public deployment"
PLAYWRIGHT_IMAGE="${FINMIND_PLAYWRIGHT_IMAGE:-mcr.microsoft.com/playwright:v1.55.0-noble}"

# Minimal POSIX argument parser: each option consumes its following value.
while [ "$#" -gt 0 ]; do
  case "$1" in
    --frontend-url)
      FRONTEND_URL="$2"
      shift 2
      ;;
    --api-base-url)
      API_BASE_URL="$2"
      shift 2
      ;;
    --provider-name)
      PROVIDER_NAME="$2"
      shift 2
      ;;
    *)
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

# Both URLs are mandatory; the provider name is cosmetic and defaults above.
if [ -z "$FRONTEND_URL" ] || [ -z "$API_BASE_URL" ]; then
  echo "Usage: ./scripts/validate-public-deployment.sh --frontend-url --api-base-url [--provider-name