Compare commits
12 Commits
feature/is ... feature/is
| Author | SHA1 | Date |
|---|---|---|
| | a14f944fcc | |
| | 56f7405baa | |
| | e3b1ecc572 | |
| | 8acf72b22c | |
| | c95102a0bd | |
| | 0685d62f9c | |
| | 78021d4695 | |
| | 3cdd10783b | |
| | c4e31be27a | |
| | 9d9ade14eb | |
| | c5831966ed | |
| | f03cc6039b | |
@@ -108,7 +108,7 @@ class MarketScanner:
         self.context_store.set_context(
             ContextLayer.L7_REALTIME,
             timeframe,
-            f"{market.code}_{stock_code}_volatility",
+            f"volatility_{market.code}_{stock_code}",
             {
                 "price": metrics.current_price,
                 "atr": metrics.atr,
@@ -179,7 +179,7 @@ class MarketScanner:
         self.context_store.set_context(
             ContextLayer.L7_REALTIME,
             timeframe,
-            f"{market.code}_scan_result",
+            f"scan_result_{market.code}",
             {
                 "total_scanned": len(valid_metrics),
                 "top_movers": [m.stock_code for m in top_movers],
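Both renames move the metric name to the front of the key ("volatility_KR_005930" rather than "KR_005930_volatility"), giving every market-scoped key a stable metric prefix; the aggregator changes below rely on the same convention when they discover per-market keys via key.startswith("total_pnl_").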
@@ -5,6 +5,7 @@ The context tree implements Pillar 2: hierarchical memory management across
 """
 
 from src.context.layer import ContextLayer
+from src.context.scheduler import ContextScheduler
 from src.context.store import ContextStore
 
-__all__ = ["ContextLayer", "ContextStore"]
+__all__ = ["ContextLayer", "ContextScheduler", "ContextStore"]
@@ -18,52 +18,83 @@ class ContextAggregator:
         self.conn = conn
         self.store = ContextStore(conn)
 
-    def aggregate_daily_from_trades(self, date: str | None = None) -> None:
+    def aggregate_daily_from_trades(
+        self, date: str | None = None, market: str | None = None
+    ) -> None:
         """Aggregate L6 (daily) context from trades table.
 
         Args:
             date: Date in YYYY-MM-DD format. If None, uses today.
+            market: Market code filter (e.g., "KR", "US"). If None, aggregates all markets.
         """
         if date is None:
             date = datetime.now(UTC).date().isoformat()
 
-        # Calculate daily metrics from trades
-        cursor = self.conn.execute(
-            """
-            SELECT
-                COUNT(*) as trade_count,
-                SUM(CASE WHEN action = 'BUY' THEN 1 ELSE 0 END) as buys,
-                SUM(CASE WHEN action = 'SELL' THEN 1 ELSE 0 END) as sells,
-                SUM(CASE WHEN action = 'HOLD' THEN 1 ELSE 0 END) as holds,
-                AVG(confidence) as avg_confidence,
-                SUM(pnl) as total_pnl,
-                COUNT(DISTINCT stock_code) as unique_stocks,
-                SUM(CASE WHEN pnl > 0 THEN 1 ELSE 0 END) as wins,
-                SUM(CASE WHEN pnl < 0 THEN 1 ELSE 0 END) as losses
-            FROM trades
-            WHERE DATE(timestamp) = ?
-            """,
-            (date,),
-        )
-        row = cursor.fetchone()
-
-        if row and row[0] > 0:  # At least one trade
-            trade_count, buys, sells, holds, avg_conf, total_pnl, stocks, wins, losses = row
-
-            # Store daily metrics in L6
-            self.store.set_context(ContextLayer.L6_DAILY, date, "trade_count", trade_count)
-            self.store.set_context(ContextLayer.L6_DAILY, date, "buys", buys)
-            self.store.set_context(ContextLayer.L6_DAILY, date, "sells", sells)
-            self.store.set_context(ContextLayer.L6_DAILY, date, "holds", holds)
-            self.store.set_context(
-                ContextLayer.L6_DAILY, date, "avg_confidence", round(avg_conf, 2)
-            )
-            self.store.set_context(
-                ContextLayer.L6_DAILY, date, "total_pnl", round(total_pnl, 2)
-            )
-            self.store.set_context(ContextLayer.L6_DAILY, date, "unique_stocks", stocks)
-            win_rate = round(wins / max(wins + losses, 1) * 100, 2)
-            self.store.set_context(ContextLayer.L6_DAILY, date, "win_rate", win_rate)
+        if market is None:
+            cursor = self.conn.execute(
+                """
+                SELECT DISTINCT market
+                FROM trades
+                WHERE DATE(timestamp) = ?
+                """,
+                (date,),
+            )
+            markets = [row[0] for row in cursor.fetchall() if row[0]]
+        else:
+            markets = [market]
+
+        for market_code in markets:
+            # Calculate daily metrics from trades for the market
+            cursor = self.conn.execute(
+                """
+                SELECT
+                    COUNT(*) as trade_count,
+                    SUM(CASE WHEN action = 'BUY' THEN 1 ELSE 0 END) as buys,
+                    SUM(CASE WHEN action = 'SELL' THEN 1 ELSE 0 END) as sells,
+                    SUM(CASE WHEN action = 'HOLD' THEN 1 ELSE 0 END) as holds,
+                    AVG(confidence) as avg_confidence,
+                    SUM(pnl) as total_pnl,
+                    COUNT(DISTINCT stock_code) as unique_stocks,
+                    SUM(CASE WHEN pnl > 0 THEN 1 ELSE 0 END) as wins,
+                    SUM(CASE WHEN pnl < 0 THEN 1 ELSE 0 END) as losses
+                FROM trades
+                WHERE DATE(timestamp) = ? AND market = ?
+                """,
+                (date, market_code),
+            )
+            row = cursor.fetchone()
+
+            if row and row[0] > 0:  # At least one trade
+                trade_count, buys, sells, holds, avg_conf, total_pnl, stocks, wins, losses = row
+
+                key_suffix = f"_{market_code}"
+
+                # Store daily metrics in L6 with market suffix
+                self.store.set_context(
+                    ContextLayer.L6_DAILY, date, f"trade_count{key_suffix}", trade_count
+                )
+                self.store.set_context(ContextLayer.L6_DAILY, date, f"buys{key_suffix}", buys)
+                self.store.set_context(ContextLayer.L6_DAILY, date, f"sells{key_suffix}", sells)
+                self.store.set_context(ContextLayer.L6_DAILY, date, f"holds{key_suffix}", holds)
+                self.store.set_context(
+                    ContextLayer.L6_DAILY,
+                    date,
+                    f"avg_confidence{key_suffix}",
+                    round(avg_conf, 2),
+                )
+                self.store.set_context(
+                    ContextLayer.L6_DAILY,
+                    date,
+                    f"total_pnl{key_suffix}",
+                    round(total_pnl, 2),
+                )
+                self.store.set_context(
+                    ContextLayer.L6_DAILY, date, f"unique_stocks{key_suffix}", stocks
+                )
+                win_rate = round(wins / max(wins + losses, 1) * 100, 2)
+                self.store.set_context(
+                    ContextLayer.L6_DAILY, date, f"win_rate{key_suffix}", win_rate
+                )
 
     def aggregate_weekly_from_daily(self, week: str | None = None) -> None:
         """Aggregate L5 (weekly) context from L6 (daily).
@@ -92,14 +123,25 @@
             daily_data[row[0]].append(json.loads(row[1]))
 
         if daily_data:
-            # Sum all PnL values
+            # Sum all PnL values (market-specific if suffixed)
             if "total_pnl" in daily_data:
                 total_pnl = sum(daily_data["total_pnl"])
                 self.store.set_context(
                     ContextLayer.L5_WEEKLY, week, "weekly_pnl", round(total_pnl, 2)
                 )
 
-            # Average all confidence values
+            for key, values in daily_data.items():
+                if key.startswith("total_pnl_"):
+                    market_code = key.split("total_pnl_", 1)[1]
+                    total_pnl = sum(values)
+                    self.store.set_context(
+                        ContextLayer.L5_WEEKLY,
+                        week,
+                        f"weekly_pnl_{market_code}",
+                        round(total_pnl, 2),
+                    )
+
+            # Average all confidence values (market-specific if suffixed)
             if "avg_confidence" in daily_data:
                 conf_values = daily_data["avg_confidence"]
                 avg_conf = sum(conf_values) / len(conf_values)
@@ -107,6 +149,17 @@
                     ContextLayer.L5_WEEKLY, week, "avg_confidence", round(avg_conf, 2)
                 )
 
+            for key, values in daily_data.items():
+                if key.startswith("avg_confidence_"):
+                    market_code = key.split("avg_confidence_", 1)[1]
+                    avg_conf = sum(values) / len(values)
+                    self.store.set_context(
+                        ContextLayer.L5_WEEKLY,
+                        week,
+                        f"avg_confidence_{market_code}",
+                        round(avg_conf, 2),
+                    )
+
     def aggregate_monthly_from_weekly(self, month: str | None = None) -> None:
         """Aggregate L4 (monthly) context from L5 (weekly).
 
@@ -135,8 +188,16 @@
 
         if weekly_data:
             # Sum all weekly PnL values
+            total_pnl_values: list[float] = []
             if "weekly_pnl" in weekly_data:
-                total_pnl = sum(weekly_data["weekly_pnl"])
+                total_pnl_values.extend(weekly_data["weekly_pnl"])
+
+            for key, values in weekly_data.items():
+                if key.startswith("weekly_pnl_"):
+                    total_pnl_values.extend(values)
+
+            if total_pnl_values:
+                total_pnl = sum(total_pnl_values)
                 self.store.set_context(
                     ContextLayer.L4_MONTHLY, month, "monthly_pnl", round(total_pnl, 2)
                 )
@@ -230,21 +291,44 @@
         )
 
     def run_all_aggregations(self) -> None:
-        """Run all aggregations from L7 to L1 (bottom-up)."""
+        """Run all aggregations from L7 to L1 (bottom-up).
+
+        All timeframes are derived from the latest trade timestamp so that
+        past data re-aggregation produces consistent results across layers.
+        """
+        cursor = self.conn.execute("SELECT MAX(timestamp) FROM trades")
+        row = cursor.fetchone()
+        if not row or row[0] is None:
+            return
+
+        ts_raw = row[0]
+        if ts_raw.endswith("Z"):
+            ts_raw = ts_raw.replace("Z", "+00:00")
+        latest_ts = datetime.fromisoformat(ts_raw)
+        trade_date = latest_ts.date()
+        date_str = trade_date.isoformat()
+
+        iso_year, iso_week, _ = trade_date.isocalendar()
+        week_str = f"{iso_year}-W{iso_week:02d}"
+        month_str = f"{trade_date.year}-{trade_date.month:02d}"
+        quarter = (trade_date.month - 1) // 3 + 1
+        quarter_str = f"{trade_date.year}-Q{quarter}"
+        year_str = str(trade_date.year)
+
         # L7 (trades) → L6 (daily)
-        self.aggregate_daily_from_trades()
+        self.aggregate_daily_from_trades(date_str)
 
         # L6 (daily) → L5 (weekly)
-        self.aggregate_weekly_from_daily()
+        self.aggregate_weekly_from_daily(week_str)
 
         # L5 (weekly) → L4 (monthly)
-        self.aggregate_monthly_from_weekly()
+        self.aggregate_monthly_from_weekly(month_str)
 
         # L4 (monthly) → L3 (quarterly)
-        self.aggregate_quarterly_from_monthly()
+        self.aggregate_quarterly_from_monthly(quarter_str)
 
         # L3 (quarterly) → L2 (annual)
-        self.aggregate_annual_from_quarterly()
+        self.aggregate_annual_from_quarterly(year_str)
 
         # L2 (annual) → L1 (legacy)
         self.aggregate_legacy_from_annual()
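A minimal sketch of the resulting market-scoped flow, mirroring the updated tests further down (the in-memory DB and demo trade are illustrative, not part of this diff):

    from datetime import UTC, datetime

    from src.context.aggregator import ContextAggregator
    from src.context.layer import ContextLayer
    from src.db import init_db, log_trade

    conn = init_db(":memory:")
    log_trade(conn, "005930", "BUY", 85, "demo", quantity=1, price=70000, pnl=500)

    date = datetime.now(UTC).date().isoformat()
    agg = ContextAggregator(conn)
    agg.aggregate_daily_from_trades(date, market="KR")  # omit market to cover every market

    # L6 keys now carry a market suffix, e.g. "total_pnl_KR"
    assert agg.store.get_context(ContextLayer.L6_DAILY, date, "total_pnl_KR") == 500.0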
src/context/scheduler.py (new file, +135 lines)
@@ -0,0 +1,135 @@
"""Context aggregation scheduler for periodic rollups and cleanup."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
from calendar import monthrange
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
|
||||||
|
from src.context.aggregator import ContextAggregator
|
||||||
|
from src.context.store import ContextStore
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class ScheduleResult:
|
||||||
|
"""Represents which scheduled tasks ran."""
|
||||||
|
|
||||||
|
weekly: bool = False
|
||||||
|
monthly: bool = False
|
||||||
|
quarterly: bool = False
|
||||||
|
annual: bool = False
|
||||||
|
legacy: bool = False
|
||||||
|
cleanup: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class ContextScheduler:
|
||||||
|
"""Run periodic context aggregations and cleanup when due."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
conn: sqlite3.Connection | None = None,
|
||||||
|
aggregator: ContextAggregator | None = None,
|
||||||
|
store: ContextStore | None = None,
|
||||||
|
) -> None:
|
||||||
|
if aggregator is None:
|
||||||
|
if conn is None:
|
||||||
|
raise ValueError("conn is required when aggregator is not provided")
|
||||||
|
aggregator = ContextAggregator(conn)
|
||||||
|
self.aggregator = aggregator
|
||||||
|
|
||||||
|
if store is None:
|
||||||
|
store = getattr(aggregator, "store", None)
|
||||||
|
if store is None:
|
||||||
|
if conn is None:
|
||||||
|
raise ValueError("conn is required when store is not provided")
|
||||||
|
store = ContextStore(conn)
|
||||||
|
self.store = store
|
||||||
|
|
||||||
|
self._last_run: dict[str, str] = {}
|
||||||
|
|
||||||
|
def run_if_due(self, now: datetime | None = None) -> ScheduleResult:
|
||||||
|
"""Run scheduled aggregations if their schedule is due.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
now: Current datetime (UTC). If None, uses current time.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ScheduleResult indicating which tasks ran.
|
||||||
|
"""
|
||||||
|
if now is None:
|
||||||
|
now = datetime.now(UTC)
|
||||||
|
|
||||||
|
today = now.date().isoformat()
|
||||||
|
result = ScheduleResult()
|
||||||
|
|
||||||
|
if self._should_run("cleanup", today):
|
||||||
|
self.store.cleanup_expired_contexts()
|
||||||
|
result = self._with(result, cleanup=True)
|
||||||
|
|
||||||
|
if self._is_sunday(now) and self._should_run("weekly", today):
|
||||||
|
week = now.strftime("%Y-W%V")
|
||||||
|
self.aggregator.aggregate_weekly_from_daily(week)
|
||||||
|
result = self._with(result, weekly=True)
|
||||||
|
|
||||||
|
if self._is_last_day_of_month(now) and self._should_run("monthly", today):
|
||||||
|
month = now.strftime("%Y-%m")
|
||||||
|
self.aggregator.aggregate_monthly_from_weekly(month)
|
||||||
|
result = self._with(result, monthly=True)
|
||||||
|
|
||||||
|
if self._is_last_day_of_quarter(now) and self._should_run("quarterly", today):
|
||||||
|
quarter = self._current_quarter(now)
|
||||||
|
self.aggregator.aggregate_quarterly_from_monthly(quarter)
|
||||||
|
result = self._with(result, quarterly=True)
|
||||||
|
|
||||||
|
if self._is_last_day_of_year(now) and self._should_run("annual", today):
|
||||||
|
year = str(now.year)
|
||||||
|
self.aggregator.aggregate_annual_from_quarterly(year)
|
||||||
|
result = self._with(result, annual=True)
|
||||||
|
|
||||||
|
# Legacy rollup runs after annual aggregation.
|
||||||
|
self.aggregator.aggregate_legacy_from_annual()
|
||||||
|
result = self._with(result, legacy=True)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _should_run(self, key: str, date_str: str) -> bool:
|
||||||
|
if self._last_run.get(key) == date_str:
|
||||||
|
return False
|
||||||
|
self._last_run[key] = date_str
|
||||||
|
return True
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_sunday(now: datetime) -> bool:
|
||||||
|
return now.weekday() == 6
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_last_day_of_month(now: datetime) -> bool:
|
||||||
|
last_day = monthrange(now.year, now.month)[1]
|
||||||
|
return now.day == last_day
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _is_last_day_of_quarter(cls, now: datetime) -> bool:
|
||||||
|
if now.month not in (3, 6, 9, 12):
|
||||||
|
return False
|
||||||
|
return cls._is_last_day_of_month(now)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_last_day_of_year(now: datetime) -> bool:
|
||||||
|
return now.month == 12 and now.day == 31
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _current_quarter(now: datetime) -> str:
|
||||||
|
quarter = (now.month - 1) // 3 + 1
|
||||||
|
return f"{now.year}-Q{quarter}"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _with(result: ScheduleResult, **kwargs: bool) -> ScheduleResult:
|
||||||
|
return ScheduleResult(
|
||||||
|
weekly=kwargs.get("weekly", result.weekly),
|
||||||
|
monthly=kwargs.get("monthly", result.monthly),
|
||||||
|
quarterly=kwargs.get("quarterly", result.quarterly),
|
||||||
|
annual=kwargs.get("annual", result.annual),
|
||||||
|
legacy=kwargs.get("legacy", result.legacy),
|
||||||
|
cleanup=kwargs.get("cleanup", result.cleanup),
|
||||||
|
)
|
||||||
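A usage sketch for the new scheduler — the hourly asyncio loop and DB path are assumptions; only the ContextScheduler API comes from the file above. Constructed once over the shared connection, run_if_due can be called from any periodic task, and the per-day guard makes repeated calls idempotent:

    import asyncio

    from src.context.scheduler import ContextScheduler
    from src.db import init_db


    async def context_rollup_loop(db_path: str) -> None:
        scheduler = ContextScheduler(conn=init_db(db_path))
        while True:
            # Cleanup runs at most once per day; rollups only on boundary days.
            result = scheduler.run_if_due()
            if result.weekly or result.monthly or result.quarterly or result.annual:
                print("context rollups ran:", result)
            await asyncio.sleep(3600)  # hourly polling suffices for day-granularity tasks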
src/db.py (35 changed lines)
@@ -6,6 +6,7 @@ import json
 import sqlite3
 from datetime import UTC, datetime
 from pathlib import Path
+from typing import Any
 
 
 def init_db(db_path: str) -> sqlite3.Connection:
@@ -26,7 +27,8 @@ def init_db(db_path: str) -> sqlite3.Connection:
             price REAL,
             pnl REAL DEFAULT 0.0,
             market TEXT DEFAULT 'KR',
-            exchange_code TEXT DEFAULT 'KRX'
+            exchange_code TEXT DEFAULT 'KRX',
+            decision_id TEXT
         )
         """
     )
@@ -41,6 +43,8 @@ def init_db(db_path: str) -> sqlite3.Connection:
         conn.execute("ALTER TABLE trades ADD COLUMN exchange_code TEXT DEFAULT 'KRX'")
     if "selection_context" not in columns:
         conn.execute("ALTER TABLE trades ADD COLUMN selection_context TEXT")
+    if "decision_id" not in columns:
+        conn.execute("ALTER TABLE trades ADD COLUMN decision_id TEXT")
 
     # Context tree tables for multi-layered memory management
     conn.execute(
@@ -143,6 +147,7 @@ def log_trade(
     market: str = "KR",
     exchange_code: str = "KRX",
     selection_context: dict[str, any] | None = None,
+    decision_id: str | None = None,
 ) -> None:
     """Insert a trade record into the database.
 
@@ -166,9 +171,9 @@ def log_trade(
        """
        INSERT INTO trades (
            timestamp, stock_code, action, confidence, rationale,
-            quantity, price, pnl, market, exchange_code, selection_context
+            quantity, price, pnl, market, exchange_code, selection_context, decision_id
        )
-        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
        (
            datetime.now(UTC).isoformat(),
@@ -182,6 +187,30 @@ def log_trade(
            market,
            exchange_code,
            context_json,
+            decision_id,
        ),
    )
    conn.commit()
+
+
+def get_latest_buy_trade(
+    conn: sqlite3.Connection, stock_code: str, market: str
+) -> dict[str, Any] | None:
+    """Fetch the most recent BUY trade for a stock and market."""
+    cursor = conn.execute(
+        """
+        SELECT decision_id, price, quantity
+        FROM trades
+        WHERE stock_code = ?
+          AND market = ?
+          AND action = 'BUY'
+          AND decision_id IS NOT NULL
+        ORDER BY timestamp DESC
+        LIMIT 1
+        """,
+        (stock_code, market),
+    )
+    row = cursor.fetchone()
+    if not row:
+        return None
+    return {"decision_id": row[0], "price": row[1], "quantity": row[2]}
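The decision_id column closes the BUY→SELL attribution loop. A small sketch of the round trip (identifiers and prices are illustrative), matching the integration test near the end of this compare:

    from src.db import get_latest_buy_trade, init_db, log_trade

    conn = init_db(":memory:")
    log_trade(
        conn, "005930", "BUY", 85, "entry", quantity=1, price=100.0,
        market="KR", exchange_code="KRX", decision_id="buy-001",  # hypothetical id
    )

    buy = get_latest_buy_trade(conn, "005930", "KR")
    assert buy == {"decision_id": "buy-001", "price": 100.0, "quantity": 1}

    # Realized PnL at SELL time, as computed in src/main.py below:
    trade_pnl = (120.0 - buy["price"]) * buy["quantity"]  # -> 20.0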
@@ -7,6 +7,7 @@ from src.evolution.performance_tracker import (
     PerformanceTracker,
     StrategyMetrics,
 )
+from src.evolution.scorecard import DailyScorecard
 
 __all__ = [
     "EvolutionOptimizer",
@@ -16,4 +17,5 @@ __all__ = [
     "PerformanceTracker",
     "PerformanceDashboard",
     "StrategyMetrics",
+    "DailyScorecard",
 ]
src/evolution/scorecard.py (new file, +25 lines)
@@ -0,0 +1,25 @@
"""Daily scorecard model for end-of-day performance review."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class DailyScorecard:
|
||||||
|
"""Structured daily performance snapshot for a single market."""
|
||||||
|
|
||||||
|
date: str
|
||||||
|
market: str
|
||||||
|
total_decisions: int
|
||||||
|
buys: int
|
||||||
|
sells: int
|
||||||
|
holds: int
|
||||||
|
total_pnl: float
|
||||||
|
win_rate: float
|
||||||
|
avg_confidence: float
|
||||||
|
scenario_match_rate: float
|
||||||
|
top_winners: list[str] = field(default_factory=list)
|
||||||
|
top_losers: list[str] = field(default_factory=list)
|
||||||
|
lessons: list[str] = field(default_factory=list)
|
||||||
|
cross_market_note: str = ""
|
||||||
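Nothing in this compare constructs a DailyScorecard yet; one plausible assembly from the market-suffixed L6 keys written by the aggregator (the builder itself is hypothetical) would be:

    from src.context.layer import ContextLayer
    from src.context.store import ContextStore
    from src.evolution.scorecard import DailyScorecard


    def build_scorecard(store: ContextStore, date: str, market: str) -> DailyScorecard:
        def metric(key: str, default: float = 0.0) -> float:
            value = store.get_context(ContextLayer.L6_DAILY, date, f"{key}_{market}")
            return default if value is None else float(value)

        return DailyScorecard(
            date=date,
            market=market,
            total_decisions=int(metric("trade_count")),
            buys=int(metric("buys")),
            sells=int(metric("sells")),
            holds=int(metric("holds")),
            total_pnl=metric("total_pnl"),
            win_rate=metric("win_rate"),
            avg_confidence=metric("avg_confidence"),
            scenario_match_rate=0.0,  # not tracked at L6 in this compare
        )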
src/main.py (97 changed lines)
@@ -13,7 +13,6 @@ import signal
 from datetime import UTC, datetime
 from typing import Any
 
-from src.analysis.scanner import MarketScanner
 from src.analysis.smart_scanner import ScanCandidate, SmartVolatilityScanner
 from src.analysis.volatility import VolatilityAnalyzer
 from src.brain.context_selector import ContextSelector
@@ -21,12 +20,13 @@ from src.brain.gemini_client import GeminiClient, TradeDecision
 from src.broker.kis_api import KISBroker
 from src.broker.overseas import OverseasBroker
 from src.config import Settings
+from src.context.aggregator import ContextAggregator
 from src.context.layer import ContextLayer
 from src.context.store import ContextStore
 from src.core.criticality import CriticalityAssessor
 from src.core.priority_queue import PriorityTaskQueue
 from src.core.risk_manager import CircuitBreakerTripped, FatFingerRejected, RiskManager
-from src.db import init_db, log_trade
+from src.db import get_latest_buy_trade, init_db, log_trade
 from src.logging.decision_logger import DecisionLogger
 from src.logging_config import setup_logging
 from src.markets.schedule import MarketInfo, get_next_market_open, get_open_markets
@@ -154,6 +154,38 @@ async def trading_cycle(
         market_data["rsi"] = candidate.rsi
         market_data["volume_ratio"] = candidate.volume_ratio
 
+    # 1.3. Record L7 real-time context (market-scoped keys)
+    timeframe = datetime.now(UTC).isoformat()
+    context_store.set_context(
+        ContextLayer.L7_REALTIME,
+        timeframe,
+        f"volatility_{market.code}_{stock_code}",
+        {
+            "momentum_score": 50.0,
+            "volume_surge": 1.0,
+            "price_change_1m": 0.0,
+        },
+    )
+    context_store.set_context(
+        ContextLayer.L7_REALTIME,
+        timeframe,
+        f"price_{market.code}_{stock_code}",
+        {"current_price": current_price},
+    )
+    if candidate:
+        context_store.set_context(
+            ContextLayer.L7_REALTIME,
+            timeframe,
+            f"rsi_{market.code}_{stock_code}",
+            {"rsi": candidate.rsi},
+        )
+        context_store.set_context(
+            ContextLayer.L7_REALTIME,
+            timeframe,
+            f"volume_ratio_{market.code}_{stock_code}",
+            {"volume_ratio": candidate.volume_ratio},
+        )
+
     # Build portfolio data for global rule evaluation
     portfolio_data = {
         "portfolio_pnl_pct": pnl_pct,
@@ -171,7 +203,7 @@
         volatility_data = context_store.get_context(
             ContextLayer.L7_REALTIME,
             latest_timeframe,
-            f"volatility_{stock_code}",
+            f"volatility_{market.code}_{stock_code}",
         )
         if volatility_data:
             volatility_score = volatility_data.get("momentum_score", 50.0)
@@ -247,7 +279,7 @@
         "pnl_pct": pnl_pct,
     }
 
-    decision_logger.log_decision(
+    decision_id = decision_logger.log_decision(
         stock_code=stock_code,
         market=market.code,
         exchange_code=market.exchange_code,
@@ -259,6 +291,9 @@
     )
 
     # 3. Execute if actionable
+    quantity = 0
+    trade_price = current_price
+    trade_pnl = 0.0
     if decision.action in ("BUY", "SELL"):
         # Determine order size (simplified: 1 lot)
         quantity = 1
@@ -314,6 +349,18 @@
         except Exception as exc:
             logger.warning("Telegram notification failed: %s", exc)
 
+        if decision.action == "SELL":
+            buy_trade = get_latest_buy_trade(db_conn, stock_code, market.code)
+            if buy_trade and buy_trade.get("price") is not None:
+                buy_price = float(buy_trade["price"])
+                buy_qty = int(buy_trade.get("quantity") or 1)
+                trade_pnl = (trade_price - buy_price) * buy_qty
+                decision_logger.update_outcome(
+                    decision_id=buy_trade["decision_id"],
+                    pnl=trade_pnl,
+                    accuracy=1 if trade_pnl > 0 else 0,
+                )
+
     # 6. Log trade with selection context
     selection_context = None
     if stock_code in market_candidates:
@@ -331,9 +378,13 @@
         action=decision.action,
         confidence=decision.confidence,
         rationale=decision.rationale,
+        quantity=quantity,
+        price=trade_price,
+        pnl=trade_pnl,
         market=market.code,
         exchange_code=market.exchange_code,
         selection_context=selection_context,
+        decision_id=decision_id,
     )
 
     # 7. Latency monitoring
@@ -568,7 +619,7 @@ async def run_daily_session(
                 "pnl_pct": pnl_pct,
             }
 
-            decision_logger.log_decision(
+            decision_id = decision_logger.log_decision(
                 stock_code=stock_code,
                 market=market.code,
                 exchange_code=market.exchange_code,
@@ -580,6 +631,9 @@
             )
 
             # Execute if actionable
+            quantity = 0
+            trade_price = stock_data["current_price"]
+            trade_pnl = 0.0
             if decision.action in ("BUY", "SELL"):
                 quantity = 1
                 order_amount = stock_data["current_price"] * quantity
@@ -652,6 +706,18 @@
                     )
                     continue
 
+            if decision.action == "SELL":
+                buy_trade = get_latest_buy_trade(db_conn, stock_code, market.code)
+                if buy_trade and buy_trade.get("price") is not None:
+                    buy_price = float(buy_trade["price"])
+                    buy_qty = int(buy_trade.get("quantity") or 1)
+                    trade_pnl = (trade_price - buy_price) * buy_qty
+                    decision_logger.update_outcome(
+                        decision_id=buy_trade["decision_id"],
+                        pnl=trade_pnl,
+                        accuracy=1 if trade_pnl > 0 else 0,
+                    )
+
             # Log trade
             log_trade(
                 conn=db_conn,
@@ -659,8 +725,12 @@
                 action=decision.action,
                 confidence=decision.confidence,
                 rationale=decision.rationale,
+                quantity=quantity,
+                price=trade_price,
+                pnl=trade_pnl,
                 market=market.code,
                 exchange_code=market.exchange_code,
+                decision_id=decision_id,
             )
 
     logger.info("Daily trading session completed")
@@ -675,6 +745,7 @@ async def run(settings: Settings) -> None:
     db_conn = init_db(settings.DB_PATH)
     decision_logger = DecisionLogger(db_conn)
     context_store = ContextStore(db_conn)
+    context_aggregator = ContextAggregator(db_conn)
 
     # V2 proactive strategy components
     context_selector = ContextSelector(context_store)
@@ -835,15 +906,6 @@
 
     # Initialize volatility hunter
     volatility_analyzer = VolatilityAnalyzer(min_volume_surge=2.0, min_price_change=1.0)
-    market_scanner = MarketScanner(
-        broker=broker,
-        overseas_broker=overseas_broker,
-        volatility_analyzer=volatility_analyzer,
-        context_store=context_store,
-        top_n=5,
-        max_concurrent_scans=1,  # Fully serialized to avoid EGW00201
-    )
-
     # Initialize smart scanner (Python-first, AI-last pipeline)
     smart_scanner = SmartVolatilityScanner(
         broker=broker,
@@ -968,6 +1030,13 @@
                 market_info = MARKETS.get(market_code)
                 if market_info:
                     await telegram.notify_market_close(market_info.name, 0.0)
+                    market_date = datetime.now(
+                        market_info.timezone
+                    ).date().isoformat()
+                    context_aggregator.aggregate_daily_from_trades(
+                        date=market_date,
+                        market=market_code,
+                    )
             except Exception as exc:
                 logger.warning("Market close notification failed: %s", exc)
             _market_states[market_code] = False
@@ -161,7 +161,7 @@ class TestContextAggregator:
         self, aggregator: ContextAggregator, db_conn: sqlite3.Connection
     ) -> None:
         """Test aggregating daily metrics from trades."""
-        date = "2026-02-04"
+        date = datetime.now(UTC).date().isoformat()
 
         # Create sample trades
         log_trade(db_conn, "005930", "BUY", 85, "Good signal", quantity=10, price=70000, pnl=500)
@@ -175,36 +175,44 @@
         db_conn.commit()
 
         # Aggregate
-        aggregator.aggregate_daily_from_trades(date)
+        aggregator.aggregate_daily_from_trades(date, market="KR")
 
         # Verify L6 contexts
         store = aggregator.store
-        assert store.get_context(ContextLayer.L6_DAILY, date, "trade_count") == 3
-        assert store.get_context(ContextLayer.L6_DAILY, date, "buys") == 1
-        assert store.get_context(ContextLayer.L6_DAILY, date, "sells") == 1
-        assert store.get_context(ContextLayer.L6_DAILY, date, "holds") == 1
-        assert store.get_context(ContextLayer.L6_DAILY, date, "total_pnl") == 2000.0
-        assert store.get_context(ContextLayer.L6_DAILY, date, "unique_stocks") == 3
+        assert store.get_context(ContextLayer.L6_DAILY, date, "trade_count_KR") == 3
+        assert store.get_context(ContextLayer.L6_DAILY, date, "buys_KR") == 1
+        assert store.get_context(ContextLayer.L6_DAILY, date, "sells_KR") == 1
+        assert store.get_context(ContextLayer.L6_DAILY, date, "holds_KR") == 1
+        assert store.get_context(ContextLayer.L6_DAILY, date, "total_pnl_KR") == 2000.0
+        assert store.get_context(ContextLayer.L6_DAILY, date, "unique_stocks_KR") == 3
         # 2 wins, 0 losses
-        assert store.get_context(ContextLayer.L6_DAILY, date, "win_rate") == 100.0
+        assert store.get_context(ContextLayer.L6_DAILY, date, "win_rate_KR") == 100.0
 
     def test_aggregate_weekly_from_daily(self, aggregator: ContextAggregator) -> None:
         """Test aggregating weekly metrics from daily."""
         week = "2026-W06"
 
         # Set daily contexts
-        aggregator.store.set_context(ContextLayer.L6_DAILY, "2026-02-02", "total_pnl", 100.0)
-        aggregator.store.set_context(ContextLayer.L6_DAILY, "2026-02-03", "total_pnl", 200.0)
-        aggregator.store.set_context(ContextLayer.L6_DAILY, "2026-02-02", "avg_confidence", 80.0)
-        aggregator.store.set_context(ContextLayer.L6_DAILY, "2026-02-03", "avg_confidence", 85.0)
+        aggregator.store.set_context(
+            ContextLayer.L6_DAILY, "2026-02-02", "total_pnl_KR", 100.0
+        )
+        aggregator.store.set_context(
+            ContextLayer.L6_DAILY, "2026-02-03", "total_pnl_KR", 200.0
+        )
+        aggregator.store.set_context(
+            ContextLayer.L6_DAILY, "2026-02-02", "avg_confidence_KR", 80.0
+        )
+        aggregator.store.set_context(
+            ContextLayer.L6_DAILY, "2026-02-03", "avg_confidence_KR", 85.0
+        )
 
         # Aggregate
         aggregator.aggregate_weekly_from_daily(week)
 
         # Verify L5 contexts
         store = aggregator.store
-        weekly_pnl = store.get_context(ContextLayer.L5_WEEKLY, week, "weekly_pnl")
-        avg_conf = store.get_context(ContextLayer.L5_WEEKLY, week, "avg_confidence")
+        weekly_pnl = store.get_context(ContextLayer.L5_WEEKLY, week, "weekly_pnl_KR")
+        avg_conf = store.get_context(ContextLayer.L5_WEEKLY, week, "avg_confidence_KR")
 
         assert weekly_pnl == 300.0
         assert avg_conf == 82.5
@@ -214,9 +222,15 @@
         month = "2026-02"
 
         # Set weekly contexts
-        aggregator.store.set_context(ContextLayer.L5_WEEKLY, "2026-W05", "weekly_pnl", 100.0)
-        aggregator.store.set_context(ContextLayer.L5_WEEKLY, "2026-W06", "weekly_pnl", 200.0)
-        aggregator.store.set_context(ContextLayer.L5_WEEKLY, "2026-W07", "weekly_pnl", 150.0)
+        aggregator.store.set_context(
+            ContextLayer.L5_WEEKLY, "2026-W05", "weekly_pnl_KR", 100.0
+        )
+        aggregator.store.set_context(
+            ContextLayer.L5_WEEKLY, "2026-W06", "weekly_pnl_KR", 200.0
+        )
+        aggregator.store.set_context(
+            ContextLayer.L5_WEEKLY, "2026-W07", "weekly_pnl_KR", 150.0
+        )
 
         # Aggregate
         aggregator.aggregate_monthly_from_weekly(month)
@@ -285,7 +299,7 @@
         self, aggregator: ContextAggregator, db_conn: sqlite3.Connection
     ) -> None:
         """Test running all aggregations from L7 to L1."""
-        date = "2026-02-04"
+        date = datetime.now(UTC).date().isoformat()
 
         # Create sample trades
         log_trade(db_conn, "005930", "BUY", 85, "Good signal", quantity=10, price=70000, pnl=1000)
@@ -299,10 +313,18 @@
 
         # Verify data exists in each layer
         store = aggregator.store
-        assert store.get_context(ContextLayer.L6_DAILY, date, "total_pnl") == 1000.0
-        current_week = datetime.now(UTC).strftime("%Y-W%V")
-        assert store.get_context(ContextLayer.L5_WEEKLY, current_week, "weekly_pnl") is not None
-        # Further layers depend on time alignment, just verify no crashes
+        assert store.get_context(ContextLayer.L6_DAILY, date, "total_pnl_KR") == 1000.0
+        from datetime import date as date_cls
+        trade_date = date_cls.fromisoformat(date)
+        iso_year, iso_week, _ = trade_date.isocalendar()
+        trade_week = f"{iso_year}-W{iso_week:02d}"
+        assert store.get_context(ContextLayer.L5_WEEKLY, trade_week, "weekly_pnl_KR") is not None
+        trade_month = f"{trade_date.year}-{trade_date.month:02d}"
+        trade_quarter = f"{trade_date.year}-Q{(trade_date.month - 1) // 3 + 1}"
+        trade_year = str(trade_date.year)
+        assert store.get_context(ContextLayer.L4_MONTHLY, trade_month, "monthly_pnl") == 1000.0
+        assert store.get_context(ContextLayer.L3_QUARTERLY, trade_quarter, "quarterly_pnl") == 1000.0
+        assert store.get_context(ContextLayer.L2_ANNUAL, trade_year, "annual_pnl") == 1000.0
 
 
 class TestLayerMetadata:
tests/test_context_scheduler.py (new file, +104 lines)
@@ -0,0 +1,104 @@
"""Tests for ContextScheduler."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import UTC, datetime
|
||||||
|
|
||||||
|
from src.context.scheduler import ContextScheduler
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class StubAggregator:
|
||||||
|
"""Stub aggregator that records calls."""
|
||||||
|
|
||||||
|
weekly_calls: list[str]
|
||||||
|
monthly_calls: list[str]
|
||||||
|
quarterly_calls: list[str]
|
||||||
|
annual_calls: list[str]
|
||||||
|
legacy_calls: int
|
||||||
|
|
||||||
|
def aggregate_weekly_from_daily(self, week: str) -> None:
|
||||||
|
self.weekly_calls.append(week)
|
||||||
|
|
||||||
|
def aggregate_monthly_from_weekly(self, month: str) -> None:
|
||||||
|
self.monthly_calls.append(month)
|
||||||
|
|
||||||
|
def aggregate_quarterly_from_monthly(self, quarter: str) -> None:
|
||||||
|
self.quarterly_calls.append(quarter)
|
||||||
|
|
||||||
|
def aggregate_annual_from_quarterly(self, year: str) -> None:
|
||||||
|
self.annual_calls.append(year)
|
||||||
|
|
||||||
|
def aggregate_legacy_from_annual(self) -> None:
|
||||||
|
self.legacy_calls += 1
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class StubStore:
|
||||||
|
"""Stub store that records cleanup calls."""
|
||||||
|
|
||||||
|
cleanup_calls: int = 0
|
||||||
|
|
||||||
|
def cleanup_expired_contexts(self) -> None:
|
||||||
|
self.cleanup_calls += 1
|
||||||
|
|
||||||
|
|
||||||
|
def make_scheduler() -> tuple[ContextScheduler, StubAggregator, StubStore]:
|
||||||
|
aggregator = StubAggregator([], [], [], [], 0)
|
||||||
|
store = StubStore()
|
||||||
|
scheduler = ContextScheduler(aggregator=aggregator, store=store)
|
||||||
|
return scheduler, aggregator, store
|
||||||
|
|
||||||
|
|
||||||
|
def test_run_if_due_weekly() -> None:
|
||||||
|
scheduler, aggregator, store = make_scheduler()
|
||||||
|
now = datetime(2026, 2, 8, 10, 0, tzinfo=UTC) # Sunday
|
||||||
|
|
||||||
|
result = scheduler.run_if_due(now)
|
||||||
|
|
||||||
|
assert result.weekly is True
|
||||||
|
assert aggregator.weekly_calls == ["2026-W06"]
|
||||||
|
assert store.cleanup_calls == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_run_if_due_monthly() -> None:
|
||||||
|
scheduler, aggregator, _store = make_scheduler()
|
||||||
|
now = datetime(2026, 2, 28, 12, 0, tzinfo=UTC) # Last day of month
|
||||||
|
|
||||||
|
result = scheduler.run_if_due(now)
|
||||||
|
|
||||||
|
assert result.monthly is True
|
||||||
|
assert aggregator.monthly_calls == ["2026-02"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_run_if_due_quarterly() -> None:
|
||||||
|
scheduler, aggregator, _store = make_scheduler()
|
||||||
|
now = datetime(2026, 3, 31, 12, 0, tzinfo=UTC) # Last day of Q1
|
||||||
|
|
||||||
|
result = scheduler.run_if_due(now)
|
||||||
|
|
||||||
|
assert result.quarterly is True
|
||||||
|
assert aggregator.quarterly_calls == ["2026-Q1"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_run_if_due_annual_and_legacy() -> None:
|
||||||
|
scheduler, aggregator, _store = make_scheduler()
|
||||||
|
now = datetime(2026, 12, 31, 12, 0, tzinfo=UTC)
|
||||||
|
|
||||||
|
result = scheduler.run_if_due(now)
|
||||||
|
|
||||||
|
assert result.annual is True
|
||||||
|
assert result.legacy is True
|
||||||
|
assert aggregator.annual_calls == ["2026"]
|
||||||
|
assert aggregator.legacy_calls == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_cleanup_runs_once_per_day() -> None:
|
||||||
|
scheduler, _aggregator, store = make_scheduler()
|
||||||
|
now = datetime(2026, 2, 9, 9, 0, tzinfo=UTC)
|
||||||
|
|
||||||
|
scheduler.run_if_due(now)
|
||||||
|
scheduler.run_if_due(now)
|
||||||
|
|
||||||
|
assert store.cleanup_calls == 1
|
||||||
@@ -1,11 +1,14 @@
 """Tests for main trading loop integration."""
 
 from datetime import date
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import ANY, AsyncMock, MagicMock, patch
 
 import pytest
 
+from src.context.layer import ContextLayer
 from src.core.risk_manager import CircuitBreakerTripped, FatFingerRejected
+from src.db import init_db, log_trade
+from src.logging.decision_logger import DecisionLogger
 from src.main import safe_float, trading_cycle
 from src.strategy.models import (
     DayPlaybook,
@@ -43,6 +46,17 @@ def _make_hold_match(stock_code: str = "005930") -> ScenarioMatch:
     )
 
 
+def _make_sell_match(stock_code: str = "005930") -> ScenarioMatch:
+    """Create a ScenarioMatch that returns SELL."""
+    return ScenarioMatch(
+        stock_code=stock_code,
+        matched_scenario=None,
+        action=ScenarioAction.SELL,
+        confidence=90,
+        rationale="Test sell",
+    )
+
+
 class TestSafeFloat:
     """Test safe_float() helper function."""
 
@@ -810,6 +824,69 @@ class TestScenarioEngineIntegration:
         assert "portfolio_pnl_pct" in portfolio_data
         assert "total_cash" in portfolio_data
 
+    @pytest.mark.asyncio
+    async def test_trading_cycle_sets_l7_context_keys(
+        self, mock_broker: MagicMock, mock_market: MagicMock, mock_telegram: MagicMock,
+    ) -> None:
+        """Test L7 context is written with market-scoped keys."""
+        from src.analysis.smart_scanner import ScanCandidate
+
+        engine = MagicMock(spec=ScenarioEngine)
+        engine.evaluate = MagicMock(return_value=_make_hold_match())
+        playbook = _make_playbook()
+        context_store = MagicMock(get_latest_timeframe=MagicMock(return_value=None))
+
+        candidate = ScanCandidate(
+            stock_code="005930", name="Samsung", price=50000,
+            volume=1000000, volume_ratio=3.5, rsi=25.0,
+            signal="oversold", score=85.0,
+        )
+
+        with patch("src.main.log_trade"):
+            await trading_cycle(
+                broker=mock_broker,
+                overseas_broker=MagicMock(),
+                scenario_engine=engine,
+                playbook=playbook,
+                risk=MagicMock(),
+                db_conn=MagicMock(),
+                decision_logger=MagicMock(),
+                context_store=context_store,
+                criticality_assessor=MagicMock(
+                    assess_market_conditions=MagicMock(return_value=MagicMock(value="NORMAL")),
+                    get_timeout=MagicMock(return_value=5.0),
+                ),
+                telegram=mock_telegram,
+                market=mock_market,
+                stock_code="005930",
+                scan_candidates={"KR": {"005930": candidate}},
+            )
+
+        context_store.set_context.assert_any_call(
+            ContextLayer.L7_REALTIME,
+            ANY,
+            "volatility_KR_005930",
+            {"momentum_score": 50.0, "volume_surge": 1.0, "price_change_1m": 0.0},
+        )
+        context_store.set_context.assert_any_call(
+            ContextLayer.L7_REALTIME,
+            ANY,
+            "price_KR_005930",
+            {"current_price": 50000.0},
+        )
+        context_store.set_context.assert_any_call(
+            ContextLayer.L7_REALTIME,
+            ANY,
+            "rsi_KR_005930",
+            {"rsi": 25.0},
+        )
+        context_store.set_context.assert_any_call(
+            ContextLayer.L7_REALTIME,
+            ANY,
+            "volume_ratio_KR_005930",
+            {"volume_ratio": 3.5},
+        )
+
     @pytest.mark.asyncio
     async def test_scan_candidates_market_scoped(
         self, mock_broker: MagicMock, mock_market: MagicMock, mock_telegram: MagicMock,
@@ -1049,3 +1126,96 @@ class TestScenarioEngineIntegration:
         # REDUCE_ALL is not BUY or SELL — no order sent
         mock_broker.send_order.assert_not_called()
         mock_telegram.notify_trade_execution.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_sell_updates_original_buy_decision_outcome() -> None:
+    """SELL should update the original BUY decision outcome in decision_logs."""
+    db_conn = init_db(":memory:")
+    decision_logger = DecisionLogger(db_conn)
+
+    buy_decision_id = decision_logger.log_decision(
+        stock_code="005930",
+        market="KR",
+        exchange_code="KRX",
+        action="BUY",
+        confidence=85,
+        rationale="Initial buy",
+        context_snapshot={},
+        input_data={},
+    )
+    log_trade(
+        conn=db_conn,
+        stock_code="005930",
+        action="BUY",
+        confidence=85,
+        rationale="Initial buy",
+        quantity=1,
+        price=100.0,
+        pnl=0.0,
+        market="KR",
+        exchange_code="KRX",
+        decision_id=buy_decision_id,
+    )
+
+    broker = MagicMock()
+    broker.get_orderbook = AsyncMock(
+        return_value={"output1": {"stck_prpr": "120", "frgn_ntby_qty": "0"}}
+    )
+    broker.get_balance = AsyncMock(
+        return_value={
+            "output2": [
+                {
+                    "tot_evlu_amt": "100000",
+                    "dnca_tot_amt": "10000",
+                    "pchs_amt_smtl_amt": "90000",
+                }
+            ]
+        }
+    )
+    broker.send_order = AsyncMock(return_value={"msg1": "OK"})
+
+    overseas_broker = MagicMock()
+    engine = MagicMock(spec=ScenarioEngine)
+    engine.evaluate = MagicMock(return_value=_make_sell_match())
+    risk = MagicMock()
+    context_store = MagicMock(
+        get_latest_timeframe=MagicMock(return_value=None),
+        set_context=MagicMock(),
+    )
+    criticality_assessor = MagicMock(
+        assess_market_conditions=MagicMock(return_value=MagicMock(value="NORMAL")),
+        get_timeout=MagicMock(return_value=5.0),
+    )
+    telegram = MagicMock()
+    telegram.notify_trade_execution = AsyncMock()
+    telegram.notify_fat_finger = AsyncMock()
+    telegram.notify_circuit_breaker = AsyncMock()
+    telegram.notify_scenario_matched = AsyncMock()
+
+    market = MagicMock()
+    market.name = "Korea"
+    market.code = "KR"
+    market.exchange_code = "KRX"
+    market.is_domestic = True
+
+    await trading_cycle(
+        broker=broker,
+        overseas_broker=overseas_broker,
+        scenario_engine=engine,
+        playbook=_make_playbook(),
+        risk=risk,
+        db_conn=db_conn,
+        decision_logger=decision_logger,
+        context_store=context_store,
+        criticality_assessor=criticality_assessor,
+        telegram=telegram,
+        market=market,
+        stock_code="005930",
+        scan_candidates={},
+    )
+
+    updated_buy = decision_logger.get_decision_by_id(buy_decision_id)
+    assert updated_buy is not None
+    assert updated_buy.outcome_pnl == 20.0
+    assert updated_buy.outcome_accuracy == 1
tests/test_scorecard.py (new file, +81 lines)
@@ -0,0 +1,81 @@
"""Tests for DailyScorecard model."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from src.evolution.scorecard import DailyScorecard
|
||||||
|
|
||||||
|
|
||||||
|
def test_scorecard_initialization() -> None:
|
||||||
|
scorecard = DailyScorecard(
|
||||||
|
date="2026-02-08",
|
||||||
|
market="KR",
|
||||||
|
total_decisions=10,
|
||||||
|
buys=3,
|
||||||
|
sells=2,
|
||||||
|
holds=5,
|
||||||
|
total_pnl=1234.5,
|
||||||
|
win_rate=60.0,
|
||||||
|
avg_confidence=78.5,
|
||||||
|
scenario_match_rate=70.0,
|
||||||
|
top_winners=["005930", "000660"],
|
||||||
|
top_losers=["035420"],
|
||||||
|
lessons=["Avoid chasing breakouts"],
|
||||||
|
cross_market_note="US volatility spillover",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert scorecard.market == "KR"
|
||||||
|
assert scorecard.total_decisions == 10
|
||||||
|
assert scorecard.total_pnl == 1234.5
|
||||||
|
assert scorecard.top_winners == ["005930", "000660"]
|
||||||
|
assert scorecard.lessons == ["Avoid chasing breakouts"]
|
||||||
|
assert scorecard.cross_market_note == "US volatility spillover"
|
||||||
|
|
||||||
|
|
||||||
|
def test_scorecard_defaults() -> None:
|
||||||
|
scorecard = DailyScorecard(
|
||||||
|
date="2026-02-08",
|
||||||
|
market="US",
|
||||||
|
total_decisions=0,
|
||||||
|
buys=0,
|
||||||
|
sells=0,
|
||||||
|
holds=0,
|
||||||
|
total_pnl=0.0,
|
||||||
|
win_rate=0.0,
|
||||||
|
avg_confidence=0.0,
|
||||||
|
scenario_match_rate=0.0,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert scorecard.top_winners == []
|
||||||
|
assert scorecard.top_losers == []
|
||||||
|
assert scorecard.lessons == []
|
||||||
|
assert scorecard.cross_market_note == ""
|
||||||
|
|
||||||
|
|
||||||
|
def test_scorecard_list_isolation() -> None:
|
||||||
|
a = DailyScorecard(
|
||||||
|
date="2026-02-08",
|
||||||
|
market="KR",
|
||||||
|
total_decisions=1,
|
||||||
|
buys=1,
|
||||||
|
sells=0,
|
||||||
|
holds=0,
|
||||||
|
total_pnl=10.0,
|
||||||
|
win_rate=100.0,
|
||||||
|
avg_confidence=90.0,
|
||||||
|
scenario_match_rate=100.0,
|
||||||
|
)
|
||||||
|
b = DailyScorecard(
|
||||||
|
date="2026-02-08",
|
||||||
|
market="US",
|
||||||
|
total_decisions=1,
|
||||||
|
buys=0,
|
||||||
|
sells=1,
|
||||||
|
holds=0,
|
||||||
|
total_pnl=-5.0,
|
||||||
|
win_rate=0.0,
|
||||||
|
avg_confidence=60.0,
|
||||||
|
scenario_match_rate=50.0,
|
||||||
|
)
|
||||||
|
|
||||||
|
a.top_winners.append("005930")
|
||||||
|
assert b.top_winners == []
|
||||||
@@ -412,7 +412,7 @@ class TestMarketScanner:
         scan_result = context_store.get_context(
             ContextLayer.L7_REALTIME,
             latest_timeframe,
-            "KR_scan_result",
+            "scan_result_KR",
        )
        assert scan_result is not None
        assert scan_result["total_scanned"] == 3