Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
48 changes: 20 additions & 28 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,34 +18,29 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
- name: Install uv
uses: astral-sh/setup-uv@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: 'pip'
version: "latest"

- name: Set up Python
run: uv python install ${{ env.PYTHON_VERSION }}

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r requirements-dev.txt
run: uv sync --all-extras

- name: Run Black (Code Formatting Check)
run: |
black --check --diff app/ domains/ tests/ scripts/
run: uv run black --check --diff app/ domains/ tests/ scripts/

- name: Run Ruff (Linting)
run: |
ruff check app/ domains/ tests/ scripts/
run: uv run ruff check app/ domains/ tests/ scripts/

- name: Run mypy (Type Checking)
run: |
mypy app/ domains/ scripts/
run: uv run mypy app/ domains/ scripts/
continue-on-error: true # Allow failures initially during migration

- name: Check import sorting (isort)
run: |
isort --check-only --diff app/ domains/ tests/ scripts/
run: uv run isort --check-only --diff app/ domains/ tests/ scripts/

test:
name: Run Tests
Expand All @@ -71,22 +66,21 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
- name: Install uv
uses: astral-sh/setup-uv@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: 'pip'
version: "latest"

- name: Set up Python
run: uv python install ${{ env.PYTHON_VERSION }}

- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y tesseract-ocr tesseract-ocr-eng

- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r requirements-dev.txt
run: uv sync --all-extras

- name: Wait for Neo4j
run: |
Expand All @@ -100,16 +94,14 @@ jobs:
done

- name: Run unit tests
run: |
pytest tests/unit -v --cov=app --cov=domains --cov-report=xml --cov-report=term
run: uv run pytest tests/unit -v --cov=app --cov=domains --cov-report=xml --cov-report=term
env:
NEO4J_URI: bolt://localhost:7687
NEO4J_USER: neo4j
NEO4J_PASSWORD: testpassword

- name: Run service tests
run: |
pytest tests/service -v --cov=app --cov=domains --cov-append --cov-report=xml --cov-report=term
run: uv run pytest tests/service -v --cov=app --cov=domains --cov-append --cov-report=xml --cov-report=term
env:
NEO4J_URI: bolt://localhost:7687
NEO4J_USER: neo4j
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ neo4j_logs/
.pytest_cache/
.coverage
htmlcov/
coverage.xml
.tox/

# Temporary files
Expand Down
20 changes: 13 additions & 7 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,21 @@ RUN apt-get update && apt-get install -y \
# Set working directory
WORKDIR /app

# Copy requirements first for layer caching
COPY requirements.txt .
COPY requirements-dev.txt .
# Install uv
RUN pip install uv

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install --no-cache-dir -r requirements-dev.txt
# Copy project configuration and lock file first for better layer caching
COPY pyproject.toml uv.lock ./

# Copy application code
# Copy source directories needed for package installation
COPY app/ app/
COPY domains/ domains/
COPY schemas/ schemas/

# Install Python dependencies using uv sync for reproducible builds
RUN uv sync --no-dev --frozen --no-cache
Copy link

Copilot AI Jan 14, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The --no-dev flag excludes development dependencies, but --frozen requires the lockfile to match exactly. If the lockfile includes dev dependencies in its resolution, this could cause issues. Consider using --only-prod or ensure the lockfile is generated without dev dependencies for production builds.

Copilot uses AI. Check for mistakes.

# Copy remaining application code
COPY . .

# Create data directories
Expand Down
30 changes: 9 additions & 21 deletions app/api/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,18 +8,18 @@
"""

from fastapi import APIRouter, BackgroundTasks
from pydantic import BaseModel
from typing import Optional
from loguru import logger
from pydantic import BaseModel

router = APIRouter()


class ScanResponse(BaseModel):
    """Response model for the POST /scan admin endpoint."""

    # Outcome of the request, e.g. "queued" (see trigger_system_scan).
    status: str
    # Human-readable description of what was queued.
    message: str
    # Identifier of the background scan job; None until job tracking is
    # implemented (trigger_system_scan currently always returns None).
    job_id: str | None = None


@router.post("/scan", response_model=ScanResponse)
Expand All @@ -45,7 +45,7 @@ async def trigger_system_scan(background_tasks: BackgroundTasks, full: bool = Fa
return ScanResponse(
status="queued",
message=f"{'Full' if full else 'Incremental'} system scan queued",
job_id=None
job_id=None,
)


Expand All @@ -61,10 +61,7 @@ async def trigger_screenshot():

# TODO: Implement in Phase 1 (Visual Timeline)

return {
"status": "captured",
"message": "Screenshot captured successfully"
}
return {"status": "captured", "message": "Screenshot captured successfully"}


@router.delete("/cleanup")
Expand All @@ -88,7 +85,7 @@ async def cleanup_old_data(days: int = 90):
return {
"status": "completed",
"message": f"Cleaned up data older than {days} days",
"deleted_count": 0
"deleted_count": 0,
}


Expand All @@ -107,16 +104,7 @@ async def get_system_stats():
# - Recent activity

return {
"nodes": {
"total": 0,
"by_type": {}
},
"relationships": {
"total": 0
},
"storage": {
"screenshots": "0 MB",
"ocr_data": "0 MB",
"database": "0 MB"
}
"nodes": {"total": 0, "by_type": {}},
"relationships": {"total": 0},
"storage": {"screenshots": "0 MB", "ocr_data": "0 MB", "database": "0 MB"},
}
31 changes: 14 additions & 17 deletions app/api/ask.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,36 +8,37 @@
- Status: MCP/container/service status
"""

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List, Dict, Any, Optional

from fastapi import APIRouter
from loguru import logger
from pydantic import BaseModel

router = APIRouter()


class AskRequest(BaseModel):
    """Request model for the ask endpoint."""

    # Natural-language question to answer.
    query: str
    # Optional extra context supplied by the caller.
    context: str | None = None
    # Result cap — presumably limits the number of returned sources;
    # NOTE(review): semantics not visible here, confirm against handler.
    limit: int = 10


class Source(BaseModel):
    """A single source reference cited in an AskResponse."""

    # Kind of source (e.g. file, screenshot, graph entity — confirm set of values).
    type: str
    # Filesystem path of the source, when applicable.
    path: str | None = None
    # Timestamp string associated with the source, when applicable.
    timestamp: str | None = None
    # Graph entity identifier, when the source is a graph node.
    entity_id: str | None = None


class AskResponse(BaseModel):
    """Response model for the ask endpoint."""

    # Natural-language answer to the question.
    answer: str
    # Sources backing the answer; empty while implementation is pending.
    sources: list[Source]
    # Classification of the query (currently "unknown" placeholder).
    query_type: str
    # Cypher query executed against the graph, if any was generated.
    cypher_query: str | None = None


@router.post("/", response_model=AskResponse)
Expand Down Expand Up @@ -66,12 +67,12 @@ async def ask_question(request: AskRequest):
answer=f"Query '{request.query}' received. Full implementation coming in Phase 2.",
sources=[],
query_type="unknown",
cypher_query=None
cypher_query=None,
)


@router.post("/ingest")
async def ingest_document(path: str, doc_type: Optional[str] = None):
async def ingest_document(path: str, doc_type: str | None = None):
"""
Manually ingest a document into the knowledge graph.

Expand All @@ -86,8 +87,4 @@ async def ingest_document(path: str, doc_type: Optional[str] = None):

# TODO: Implement in Phase 1 (System Graph)

return {
"status": "queued",
"path": path,
"message": "Document queued for ingestion"
}
return {"status": "queued", "path": path, "message": "Document queued for ingestion"}
8 changes: 5 additions & 3 deletions app/api/health.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,20 @@
Health check endpoint.
"""

from datetime import datetime

from fastapi import APIRouter
from pydantic import BaseModel
from datetime import datetime

from app.utils.neo4j_client import get_neo4j_client
from app.utils.config import get_settings
from app.utils.neo4j_client import get_neo4j_client

router = APIRouter()


class HealthResponse(BaseModel):
"""Health check response model."""

status: str
timestamp: datetime
neo4j_connected: bool
Expand Down Expand Up @@ -44,5 +46,5 @@ async def health_check():
status="healthy" if neo4j_connected else "degraded",
timestamp=datetime.now(),
neo4j_connected=neo4j_connected,
version=settings.api_version
version=settings.api_version,
)
Loading
Loading