Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 47 additions & 19 deletions src/financial_agent/data/macro.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,21 +14,15 @@

_YAHOO_CHART = "https://query1.finance.yahoo.com/v8/finance/chart"

# Hardcoded economic calendar events by month for 2026.
_UPCOMING_EVENTS_2026: dict[int, list[str]] = {
1: ["FOMC Meeting Jan 27-28", "CPI Release Jan 14", "Jobs Report Jan 9"],
2: ["CPI Release Feb 12", "Jobs Report Feb 6", "Retail Sales Feb 14"],
3: ["FOMC Meeting Mar 17-18", "CPI Release Mar 11", "Jobs Report Mar 6"],
4: ["CPI Release Apr 10", "Jobs Report Apr 3", "Retail Sales Apr 15"],
5: ["FOMC Meeting May 5-6", "CPI Release May 13", "Jobs Report May 8"],
6: ["FOMC Meeting Jun 16-17", "CPI Release Jun 10", "Jobs Report Jun 5"],
7: ["CPI Release Jul 14", "Jobs Report Jul 2", "Retail Sales Jul 16"],
8: ["FOMC Meeting Aug 4-5", "CPI Release Aug 12", "Jobs Report Aug 7"],
9: ["FOMC Meeting Sep 15-16", "CPI Release Sep 10", "Jobs Report Sep 4"],
10: ["CPI Release Oct 13", "Jobs Report Oct 2", "Retail Sales Oct 16"],
11: ["FOMC Meeting Nov 3-4", "CPI Release Nov 12", "Jobs Report Nov 6"],
12: ["FOMC Meeting Dec 15-16", "CPI Release Dec 10", "Jobs Report Dec 4"],
}
# FOMC meetings are held ~8 times/year on a fixed schedule.
# CPI is released around the 10th-14th of each month.
# Jobs Report (NFP) is the first Friday of each month.
# These approximate patterns hold year over year.
# NOTE(review): _get_upcoming_events() now builds its event strings inline and
# this list is not referenced anywhere in the visible portion of the module —
# confirm a caller elsewhere uses it (e.g. as a static fallback) or remove it.
_RECURRING_EVENTS: list[str] = [
    "FOMC Meeting (check federalreserve.gov for exact dates)",
    "CPI Release (~10th-14th of month)",
    "Jobs Report (1st Friday of month)",
]


class MacroProvider:
Expand All @@ -52,14 +46,15 @@ def _build_context(self) -> MacroContext:
"""Build the full macro context from multiple data sources."""
vix_level, vix_trend = self._fetch_vix()
spy_trend = self._fetch_spy_trend()
ten_year_yield = self._fetch_ten_year_yield()
regime = _determine_regime(vix_level)
events = _get_upcoming_events()

return MacroContext(
vix_level=vix_level,
vix_trend=vix_trend,
spy_trend=spy_trend,
ten_year_yield=None,
ten_year_yield=ten_year_yield,
market_regime=regime,
upcoming_events=events,
)
Expand Down Expand Up @@ -88,6 +83,17 @@ def _fetch_vix(self) -> tuple[float | None, str]:
log.warning("vix_fetch_error", exc_info=True)
return None, "stable"

def _fetch_ten_year_yield(self) -> float | None:
    """Return the latest 10-year Treasury yield (Yahoo ``^TNX``), or None.

    Any fetch/parse failure is logged at debug level and treated as
    "yield unavailable" — the yield is optional macro context.
    """
    try:
        # ^TNX quotes the 10-year yield directly, in percent.
        if closes := _extract_closes(_yahoo_chart("%5ETNX", "5d", "1d")):
            return round(closes[-1], 2)
    except Exception:
        # Degrade quietly; callers handle a missing yield.
        log.debug("ten_year_yield_fetch_failed", exc_info=True)
    return None

def _fetch_spy_trend(self) -> str:
"""Determine SPY trend relative to its recent moving average."""
try:
Expand Down Expand Up @@ -169,6 +175,28 @@ def _determine_regime(vix_level: float | None) -> str:


def _get_upcoming_events() -> list[str]:
"""Return hardcoded upcoming economic events for the current month."""
current_month = date.today().month
return _UPCOMING_EVENTS_2026.get(current_month, [])
"""Return approximate upcoming economic events.

Uses recurring patterns rather than hardcoded dates, so it works
across years without manual updates.
"""
today = date.today()
events: list[str] = []

# Jobs report: first Friday of the month
first_day = today.replace(day=1)
# Monday=0 ... Friday=4; days until first Friday
days_to_friday = (4 - first_day.weekday()) % 7
first_friday = first_day.replace(day=1 + days_to_friday)
if first_friday >= today:
events.append(f"Jobs Report {first_friday.strftime('%b %d')}")

# CPI: typically around the 10th-14th
cpi_approx = today.replace(day=12)
if cpi_approx >= today:
events.append(f"CPI Release ~{cpi_approx.strftime('%b %d')}")

# Generic FOMC reminder (meets ~8x/year, roughly every 6 weeks)
events.append("FOMC (check schedule)")

return events
81 changes: 74 additions & 7 deletions src/financial_agent/data/news.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,69 @@
_FINNHUB_BASE = "https://finnhub.io/api/v1"

_POSITIVE_WORDS: frozenset[str] = frozenset(
{"surge", "beat", "upgrade", "growth", "profit", "record", "breakout"}
{
"surge",
"surges",
"surging",
"beat",
"beats",
"upgrade",
"upgrades",
"growth",
"profit",
"profits",
"record",
"breakout",
"rally",
"rallies",
"soar",
"soars",
"gains",
"bullish",
"outperform",
"outperforms",
"raises",
"boost",
"boosted",
"jumps",
"strong",
"exceeds",
"tops",
}
)
_NEGATIVE_WORDS: frozenset[str] = frozenset(
{"miss", "downgrade", "decline", "loss", "cut", "warning", "crash"}
{
"miss",
"misses",
"downgrade",
"downgrades",
"decline",
"declines",
"loss",
"losses",
"cut",
"cuts",
"warning",
"warns",
"crash",
"crashes",
"plunge",
"plunges",
"falls",
"bearish",
"underperform",
"underperforms",
"lowers",
"slump",
"slumps",
"drops",
"weak",
"layoffs",
"recall",
}
)
_NEGATION_WORDS: frozenset[str] = frozenset(
{"not", "no", "never", "neither", "nor", "doesn't", "didn't", "won't", "isn't", "aren't"}
)

_MAX_SYMBOLS = 5
Expand Down Expand Up @@ -111,19 +170,27 @@ def _fetch_symbol_news(self, symbol: str) -> NewsSentiment | None:


def _compute_headline_sentiment(headline: str) -> float:
    """Score a headline via keyword matching with one-token negation.

    Each positive hit adds 0.3 and each negative hit subtracts 0.3; a
    negation word ("not", "no", etc.) flips the polarity of the sentiment
    word immediately following it, and any other word cancels the pending
    negation. The result is clamped to [-1.0, 1.0].
    """
    score = 0.0
    pending_negation = False
    for token in headline.lower().split():
        # Strip surrounding punctuation so "surges!" matches "surges".
        word = token.strip(".,!?;:'\"()-")
        if word in _NEGATION_WORDS:
            pending_negation = True
        elif word in _POSITIVE_WORDS:
            score += -0.3 if pending_negation else 0.3
            pending_negation = False
        elif word in _NEGATIVE_WORDS:
            score += 0.3 if pending_negation else -0.3
            pending_negation = False
        else:
            # A non-sentiment word closes the negation window.
            pending_negation = False
    return max(-1.0, min(1.0, score))
6 changes: 4 additions & 2 deletions src/financial_agent/performance/benchmarking.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
import structlog
from pydantic import BaseModel, Field

from financial_agent.utils.io import atomic_write

log = structlog.get_logger()


Expand Down Expand Up @@ -51,10 +53,10 @@ def _load(self) -> None:
self._trades = []

def _save(self) -> None:
    """Atomically persist the journal, retaining at most the 1000 newest trades."""
    recent = self._trades[-1000:]
    self._trades = recent
    payload = json.dumps([record.model_dump() for record in recent], indent=2)
    atomic_write(self._path, payload)
    log.debug("trade_journal_saved", count=len(recent))

def record_trade(self, record: TradeRecord) -> None:
Expand Down
12 changes: 4 additions & 8 deletions src/financial_agent/persistence/equity_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
import structlog
from pydantic import BaseModel

from financial_agent.utils.io import atomic_write

log = structlog.get_logger()

_MAX_HISTORY = 365
Expand Down Expand Up @@ -85,14 +87,8 @@ def _save(self) -> None:
trimmed = self._history[-_MAX_HISTORY:]
self._history = trimmed
data = [record.model_dump() for record in self._history]
self._path.write_text(
json.dumps(data, indent=2),
encoding="utf-8",
)
self._peak_path.write_text(
json.dumps({"peak": self._peak_equity}),
encoding="utf-8",
)
atomic_write(self._path, json.dumps(data, indent=2))
atomic_write(self._peak_path, json.dumps({"peak": self._peak_equity}))
except Exception:
log.error(
"equity_save_failed",
Expand Down
13 changes: 6 additions & 7 deletions src/financial_agent/persistence/thesis_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
import structlog
from pydantic import BaseModel, Field

from financial_agent.utils.io import atomic_write

log = structlog.get_logger()


Expand Down Expand Up @@ -68,13 +70,10 @@ def _load(self) -> None:
self._theses = {}

def _save(self) -> None:
"""Write all theses to disk."""
"""Write all theses to disk atomically."""
try:
data = {symbol: thesis.model_dump() for symbol, thesis in self._theses.items()}
self._path.write_text(
json.dumps(data, indent=2),
encoding="utf-8",
)
atomic_write(self._path, json.dumps(data, indent=2))
except Exception:
log.error(
"theses_save_failed",
Expand Down Expand Up @@ -187,9 +186,9 @@ def _prune_expired_cooldowns(self, max_age_hours: int = 72) -> None:
self._save_cooldowns()

def _save_cooldowns(self) -> None:
    """Atomically persist sell-cooldown timestamps; failures are non-fatal."""
    try:
        serialized = json.dumps(self._cooldowns, indent=2)
        atomic_write(self._cooldown_path, serialized)
    except Exception:
        # Cooldowns are advisory state; losing them is safe, so only debug-log.
        log.debug("cooldowns_save_failed", exc_info=True)

Expand Down
18 changes: 18 additions & 0 deletions src/financial_agent/utils/io.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""File I/O utilities for safe persistence."""

from __future__ import annotations

import os
import tempfile
from pathlib import Path  # noqa: TC003 — used at runtime


def atomic_write(path: Path, content: str) -> None:
    """Write ``content`` to ``path`` atomically via a unique temp file + rename.

    The temp file is created with a unique name in the destination directory
    (so the final rename never crosses a filesystem boundary). Unlike a fixed
    ``path.with_suffix(".tmp")`` name, unique temp files make concurrent
    writers safe: each writes its own file and the last ``os.replace`` wins
    with a complete document. ``os.replace`` is atomic on both POSIX and
    Windows. The content is flushed and fsynced before the rename so a crash
    cannot leave a short file behind, and the temp file is removed on failure.

    Args:
        path: Destination file path.
        content: Full text to write (UTF-8).
    """
    fd, tmp_name = tempfile.mkstemp(dir=path.parent, prefix=path.name, suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as tmp:
            tmp.write(content)
            tmp.flush()
            os.fsync(tmp.fileno())  # ensure bytes are on disk before the rename
        os.replace(tmp_name, path)
    except BaseException:
        # Best-effort cleanup so failed writes don't leave temp files behind.
        try:
            os.unlink(tmp_name)
        except OSError:
            pass
        raise