Compare commits

feature/is ... 1a1fe7e637 (35 commits)

Commits:
1a1fe7e637, 2e27000760, 5a41f86112, ff9c4d6082, 25ad4776c9, 9339824e22, e6eae6c6e0, bb6bd0392e, a66181b7a7, da585ee547, c737d5009a, f7d33e69d1, 7d99d8ec4a, 0727f28f77, ac4fb00644, 4fc4a57036, 641f3e8811, ebd0a0297c, 02a72e0f7e, 478a659ac2, 16b9b6832d, 48b87a79f6, ad79082dcc, 11dff9d3e5, 3c5f1752e6, d6a389e0b7, cd36d53a47, 1242794fc4, b45d136894, ce82121f04, 0e2987e66d, cdd5a218a7, f3491e94e4, 342511a6ed, 2d5912dc08
.env.example (64 changed lines)

```diff
@@ -1,36 +1,82 @@
+# ============================================================
+# The Ouroboros — Environment Configuration
+# ============================================================
+# Copy this file to .env and fill in your values.
+# Lines starting with # are comments.
+
+# ============================================================
 # Korea Investment Securities API
+# ============================================================
 KIS_APP_KEY=your_app_key_here
 KIS_APP_SECRET=your_app_secret_here
 KIS_ACCOUNT_NO=12345678-01
-KIS_BASE_URL=https://openapivts.koreainvestment.com:9443
+
+# Paper trading (VTS): https://openapivts.koreainvestment.com:29443
+# Live trading: https://openapi.koreainvestment.com:9443
+KIS_BASE_URL=https://openapivts.koreainvestment.com:29443
+
+# ============================================================
+# Trading Mode
+# ============================================================
+# paper = 모의투자 (safe for testing), live = 실전투자 (real money)
+MODE=paper
+
+# daily = batch per session, realtime = per-stock continuous scan
+TRADE_MODE=daily
+
+# Comma-separated market codes: KR, US, JP, HK, CN, VN
+ENABLED_MARKETS=KR,US
+
+# Simulated USD cash for paper (VTS) overseas trading.
+# VTS overseas balance API often returns 0; this value is used as fallback.
+# Set to 0 to disable fallback (not used in live mode).
+PAPER_OVERSEAS_CASH=50000.0
 
+# ============================================================
 # Google Gemini
+# ============================================================
 GEMINI_API_KEY=your_gemini_api_key_here
-GEMINI_MODEL=gemini-pro
+# Recommended: gemini-2.0-flash-exp or gemini-1.5-pro
+GEMINI_MODEL=gemini-2.0-flash-exp
 
+# ============================================================
 # Risk Management
+# ============================================================
 CIRCUIT_BREAKER_PCT=-3.0
 FAT_FINGER_PCT=30.0
 CONFIDENCE_THRESHOLD=80
 
+# ============================================================
 # Database
+# ============================================================
 DB_PATH=data/trade_logs.db
 
-# Rate Limiting (requests per second for KIS API)
-# Reduced to 5.0 to avoid "초당 거래건수 초과" errors (EGW00201)
-RATE_LIMIT_RPS=5.0
+# ============================================================
+# Rate Limiting
+# ============================================================
+# KIS API real limit is ~2 RPS. Keep at 2.0 for maximum safety.
+# Increasing this risks EGW00201 "초당 거래건수 초과" errors.
+RATE_LIMIT_RPS=2.0
 
-# Trading Mode (paper / live)
-MODE=paper
-
-# External Data APIs (optional — for enhanced decision-making)
+# ============================================================
+# External Data APIs (optional)
+# ============================================================
 # NEWS_API_KEY=your_news_api_key_here
 # NEWS_API_PROVIDER=alphavantage
 # MARKET_DATA_API_KEY=your_market_data_key_here
 
+# ============================================================
 # Telegram Notifications (optional)
+# ============================================================
 # Get bot token from @BotFather on Telegram
 # Get chat ID from @userinfobot or your chat
 # TELEGRAM_BOT_TOKEN=1234567890:ABCdefGHIjklMNOpqrsTUVwxyz
 # TELEGRAM_CHAT_ID=123456789
 # TELEGRAM_ENABLED=true
+
+# ============================================================
+# Dashboard (optional)
+# ============================================================
+# DASHBOARD_ENABLED=false
+# DASHBOARD_HOST=127.0.0.1
+# DASHBOARD_PORT=8080
```
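The new rate-limit comments state that the effective KIS limit is roughly 2 requests per second. The sketch below shows one way a `RATE_LIMIT_RPS` value could drive an async throttle; the `SimpleRateLimiter` class is illustrative only and is not the repository's actual rate limiter.

```python
import asyncio
import os
import time


class SimpleRateLimiter:
    """Illustrative throttle: allows at most `rps` acquisitions per second."""

    def __init__(self, rps: float) -> None:
        self._min_interval = 1.0 / rps
        self._last = 0.0
        self._lock = asyncio.Lock()

    async def acquire(self) -> None:
        async with self._lock:
            now = time.monotonic()
            wait = self._min_interval - (now - self._last)
            if wait > 0:
                await asyncio.sleep(wait)
            self._last = time.monotonic()


async def main() -> None:
    rps = float(os.getenv("RATE_LIMIT_RPS", "2.0"))
    limiter = SimpleRateLimiter(rps)
    for i in range(5):
        await limiter.acquire()
        print(f"request {i} at {time.monotonic():.2f}")  # ~0.5 s apart at 2 RPS


if __name__ == "__main__":
    asyncio.run(main())
```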
```diff
@@ -94,6 +94,7 @@ Smart Scanner runs in `TRADE_MODE=realtime` only. Daily mode uses static watchli
 - **[Testing](docs/testing.md)** — Test structure, coverage requirements, writing tests
 - **[Agent Policies](docs/agents.md)** — Prime directives, constraints, prohibited actions
 - **[Requirements Log](docs/requirements-log.md)** — User requirements and feedback tracking
+- **[Live Trading Checklist](docs/live-trading-checklist.md)** — Paper-to-live transition checklist
 
 ## Core Principles
 
@@ -170,7 +171,7 @@ Markets auto-detected based on timezone and enabled in `ENABLED_MARKETS` env var
 - `src/core/risk_manager.py` is **READ-ONLY** — changes require human approval
 - Circuit breaker at -3.0% P&L — may only be made **stricter**
 - Fat-finger protection: max 30% of cash per order — always enforced
-- Confidence < 80 → force HOLD — cannot be weakened
+- Confidence thresholds by market_outlook (cannot be lowered): BEARISH ≥ 90, NEUTRAL/default ≥ 80, BULLISH ≥ 75
 - All code changes → corresponding tests → coverage ≥ 80%
 
 ## Contributing
```
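The replaced bullet encodes outlook-dependent confidence floors. A small illustrative gate is sketched below; the function and constant names are made up here, and the real enforcement lives in `src/core/risk_manager.py`, which this change set does not touch.

```python
# Illustrative only; the actual gate lives in src/core/risk_manager.py (read-only).
CONFIDENCE_FLOORS = {"BEARISH": 90, "NEUTRAL": 80, "BULLISH": 75}


def gate_action(action: str, confidence: int, market_outlook: str) -> str:
    """Force HOLD when confidence is below the outlook-specific floor."""
    floor = CONFIDENCE_FLOORS.get(market_outlook, 80)  # default matches NEUTRAL
    if action in ("BUY", "SELL") and confidence < floor:
        return "HOLD"
    return action


assert gate_action("BUY", 82, "BEARISH") == "HOLD"   # 82 < 90
assert gate_action("BUY", 82, "NEUTRAL") == "BUY"    # 82 >= 80
assert gate_action("BUY", 76, "BULLISH") == "BUY"    # 76 >= 75
```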
docs/live-trading-checklist.md (new file, 131 lines) @@ -0,0 +1,131 @@

# Live Trading Transition Checklist

Before switching from paper trading (paper) to live trading (live), work through every item below **in order**.

---

## 1. Prerequisites

### 1-1. KIS OpenAPI live account
- [ ] Korea Investment & Securities account opened (standard brokerage account)
- [ ] Live OpenAPI service requested (KIS website → Open API → service application)
- [ ] Live APP_KEY / APP_SECRET issued
- [ ] KIS_ACCOUNT_NO format verified: `XXXXXXXX-XX` (8 digits - 2 digits)

### 1-2. Risk parameter review
- [ ] Check `CIRCUIT_BREAKER_PCT`: default -3.0% (tightening it further is recommended)
- [ ] Check `FAT_FINGER_PCT`: default 30.0% (maximum % of cash per single order)
- [ ] Check `CONFIDENCE_THRESHOLD`: BEARISH ≥ 90, NEUTRAL ≥ 80, BULLISH ≥ 75
- [ ] Decide the initial capital and set the overseas equity allocation limit

### 1-3. System requirements
- [ ] Coverage stays at 80% or above: `pytest --cov=src`
- [ ] Type check passes: `mypy src/ --strict`
- [ ] Lint passes: `ruff check src/ tests/`

---

## 2. Environment configuration

### 2-1. Edit the `.env` file

```bash
# 1. Switch to the KIS live URL (paper: openapivts, port 29443)
KIS_BASE_URL=https://openapi.koreainvestment.com:9443

# 2. Replace with the live APP_KEY / APP_SECRET
KIS_APP_KEY=<live_APP_KEY>
KIS_APP_SECRET=<live_APP_SECRET>
KIS_ACCOUNT_NO=<live_account_number>

# 3. Change the mode to live
MODE=live

# 4. Disable PAPER_OVERSEAS_CASH (ignored in live mode, but set it to 0 explicitly)
PAPER_OVERSEAS_CASH=0
```

> ⚠️ Watch the `KIS_BASE_URL` port:
> - **Paper (VTS)**: `https://openapivts.koreainvestment.com:29443`
> - **Live**: `https://openapi.koreainvestment.com:9443`

### 2-2. Confirm automatic TR_ID branching

The TR_IDs below are **selected automatically** in code according to the `MODE` value.
No separate configuration is required, but consult this table if something goes wrong.

| Operation | Paper TR_ID | Live TR_ID |
|------|-----------|-----------|
| Domestic balance inquiry | `VTTC8434R` | `TTTC8434R` |
| Domestic cash buy | `VTTC0012U` | `TTTC0012U` |
| Domestic cash sell | `VTTC0011U` | `TTTC0011U` |
| Overseas balance inquiry | `VTTS3012R` | `TTTS3012R` |
| Overseas buy | `VTTT1002U` | `TTTT1002U` |
| Overseas sell | `VTTT1001U` | `TTTT1006U` |

> **Source**: `docs/한국투자증권_오픈API_전체문서_20260221_030000.xlsx` (official API documentation)

---

## 3. Final checks

### 3-1. Pre-launch checks
- [ ] DB backup completed: `data/trade_logs.db` → `data/backups/`
- [ ] Telegram notifications configured (alerts matter even more in live trading)
- [ ] Run the first trade with a small amount, then confirm the TR_IDs and account behave correctly

### 3-2. Run commands

```bash
# Run in live mode
python -m src.main --mode=live

# Run with the dashboard (monitor from a separate terminal)
python -m src.main --mode=live --dashboard
```

### 3-3. Checks immediately after going live
- [ ] The log prints `MODE=live`
- [ ] The first balance inquiry succeeds (no ConnectionError)
- [ ] Telegram notification received ("System started")
- [ ] After the first order, confirm the fill in the KIS app

---

## 4. Emergency stop

### Immediate stop
```bash
# Ctrl+C in the terminal (triggers graceful shutdown)
# Or via the Telegram bot command:
/stop
```

### When the Circuit Breaker trips
- When the CB trips, trading halts automatically and a Telegram alert is sent
- CB threshold: `CIRCUIT_BREAKER_PCT` (default -3.0%)
- **The threshold may only be made stricter**; it must never be loosened

---

## 5. Rollback procedure

If problems occur after switching to live:

```bash
# 1. Immediately restore MODE=paper in .env
# 2. Restart
python -m src.main --mode=paper

# 3. Check recent trades in the DB
sqlite3 data/trade_logs.db "SELECT * FROM trades ORDER BY id DESC LIMIT 20;"
```

---

## Related documents

- [System architecture](architecture.md)
- [Workflow guide](workflow.md)
- [Disaster recovery](disaster_recovery.md)
- [Agent constraints](agents.md)
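A short pre-flight script can mechanically verify the `.env` edits from section 2-1 before the system is started in live mode. The script below is a hypothetical helper (it is not part of this change set) and assumes `python-dotenv` is installed.

```python
"""Hypothetical pre-flight check for the paper-to-live transition (not part of the repo)."""
import os
import re
import sys

from dotenv import load_dotenv  # pip install python-dotenv

load_dotenv()

errors: list[str] = []

if os.getenv("MODE") != "live":
    errors.append("MODE must be 'live'")
if os.getenv("KIS_BASE_URL") != "https://openapi.koreainvestment.com:9443":
    errors.append("KIS_BASE_URL must point at the live endpoint (port 9443)")
if not re.fullmatch(r"\d{8}-\d{2}", os.getenv("KIS_ACCOUNT_NO", "")):
    errors.append("KIS_ACCOUNT_NO must match XXXXXXXX-XX")
if float(os.getenv("PAPER_OVERSEAS_CASH", "0")) != 0:
    errors.append("PAPER_OVERSEAS_CASH should be 0 in live mode")
if float(os.getenv("CIRCUIT_BREAKER_PCT", "-3.0")) < -3.0:
    errors.append("CIRCUIT_BREAKER_PCT must not be looser than -3.0")

if errors:
    print("NOT ready for live trading:")
    for e in errors:
        print(" -", e)
    sys.exit(1)
print("Pre-flight checks passed.")
```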
```diff
@@ -285,7 +285,10 @@ class KISBroker:
         await self._rate_limiter.acquire()
         session = self._get_session()
 
-        headers = await self._auth_headers("VTTC8434R")  # paper-trading balance inquiry
+        # TR_ID: live TTTC8434R, paper VTTC8434R
+        # Source: 한국투자증권 오픈API 전체문서 (20260221) — '국내주식 잔고조회' sheet
+        tr_id = "TTTC8434R" if self._settings.MODE == "live" else "VTTC8434R"
+        headers = await self._auth_headers(tr_id)
         params = {
             "CANO": self._account_no,
             "ACNT_PRDT_CD": self._product_cd,
@@ -330,7 +333,13 @@ class KISBroker:
         await self._rate_limiter.acquire()
         session = self._get_session()
 
-        tr_id = "VTTC0802U" if order_type == "BUY" else "VTTC0801U"
+        # TR_ID: live BUY=TTTC0012U SELL=TTTC0011U, paper BUY=VTTC0012U SELL=VTTC0011U
+        # Source: 한국투자증권 오픈API 전체문서 (20260221) — '주식주문(현금)' sheet
+        # Note: TTTC0802U/VTTC0802U are margin-buy TR_IDs (40%-margin accounts only); never use them for cash orders
+        if self._settings.MODE == "live":
+            tr_id = "TTTC0012U" if order_type == "BUY" else "TTTC0011U"
+        else:
+            tr_id = "VTTC0012U" if order_type == "BUY" else "VTTC0011U"
 
         # KRX requires limit orders to be rounded down to the tick unit.
         # ORD_DVSN: "00"=limit, "01"=market
```
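Given the repository's coverage rule, the new MODE-to-TR_ID branches are natural unit-test targets. The sketch below mirrors that mapping with standalone helper functions rather than the broker classes themselves, so the helpers are assumptions for illustration and not the project's actual tests.

```python
# Hypothetical test sketch that mirrors the MODE-to-TR_ID mapping from the diff above.
import pytest


def select_balance_tr_id(mode: str) -> str:
    return "TTTC8434R" if mode == "live" else "VTTC8434R"


def select_cash_order_tr_id(mode: str, order_type: str) -> str:
    if mode == "live":
        return "TTTC0012U" if order_type == "BUY" else "TTTC0011U"
    return "VTTC0012U" if order_type == "BUY" else "VTTC0011U"


@pytest.mark.parametrize(
    "mode,order_type,expected",
    [
        ("live", "BUY", "TTTC0012U"),
        ("live", "SELL", "TTTC0011U"),
        ("paper", "BUY", "VTTC0012U"),
        ("paper", "SELL", "VTTC0011U"),
    ],
)
def test_cash_order_tr_id(mode: str, order_type: str, expected: str) -> None:
    assert select_cash_order_tr_id(mode, order_type) == expected


def test_balance_tr_id() -> None:
    assert select_balance_tr_id("live") == "TTTC8434R"
    assert select_balance_tr_id("paper") == "VTTC8434R"
```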
```diff
@@ -175,8 +175,12 @@ class OverseasBroker:
         await self._broker._rate_limiter.acquire()
         session = self._broker._get_session()
 
-        # Virtual trading TR_ID for overseas balance inquiry
-        headers = await self._broker._auth_headers("VTTS3012R")
+        # TR_ID: live TTTS3012R, paper VTTS3012R
+        # Source: 한국투자증권 오픈API 전체문서 (20260221) — '해외주식 잔고조회' sheet
+        balance_tr_id = (
+            "TTTS3012R" if self._broker._settings.MODE == "live" else "VTTS3012R"
+        )
+        headers = await self._broker._auth_headers(balance_tr_id)
         params = {
             "CANO": self._broker._account_no,
             "ACNT_PRDT_CD": self._broker._product_cd,
@@ -229,10 +233,12 @@ class OverseasBroker:
         await self._broker._rate_limiter.acquire()
         session = self._broker._get_session()
 
-        # Virtual trading TR_IDs for overseas orders
+        # TR_ID: live BUY=TTTT1002U SELL=TTTT1006U, paper BUY=VTTT1002U SELL=VTTT1001U
         # Source: 한국투자증권 오픈API 전체문서 (20260221) — '해외주식 주문' sheet
-        # VTTT1002U: paper US buy, VTTT1001U: paper US sell
-        tr_id = "VTTT1002U" if order_type == "BUY" else "VTTT1001U"
+        if self._broker._settings.MODE == "live":
+            tr_id = "TTTT1002U" if order_type == "BUY" else "TTTT1006U"
+        else:
+            tr_id = "VTTT1002U" if order_type == "BUY" else "VTTT1001U"
 
         body = {
             "CANO": self._broker._account_no,
```
```diff
@@ -13,11 +13,11 @@ class Settings(BaseSettings):
     KIS_APP_KEY: str
     KIS_APP_SECRET: str
     KIS_ACCOUNT_NO: str  # format: "XXXXXXXX-XX"
-    KIS_BASE_URL: str = "https://openapivts.koreainvestment.com:9443"
+    KIS_BASE_URL: str = "https://openapivts.koreainvestment.com:29443"
 
     # Google Gemini
     GEMINI_API_KEY: str
-    GEMINI_MODEL: str = "gemini-pro"
+    GEMINI_MODEL: str = "gemini-2.0-flash"
 
     # External Data APIs (optional — for data-driven decisions)
     NEWS_API_KEY: str | None = None
```
```diff
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import json
+import os
 import sqlite3
 from datetime import UTC, datetime, timezone
 from pathlib import Path
@@ -79,6 +80,35 @@ def create_dashboard_app(db_path: str) -> FastAPI:
             total_pnl += market_status[market]["total_pnl"]
             total_decisions += market_status[market]["decision_count"]
 
+        cb_threshold = float(os.getenv("CIRCUIT_BREAKER_PCT", "-3.0"))
+        pnl_pct_rows = conn.execute(
+            """
+            SELECT key, value
+            FROM system_metrics
+            WHERE key LIKE 'portfolio_pnl_pct_%'
+            ORDER BY updated_at DESC
+            LIMIT 20
+            """
+        ).fetchall()
+        current_pnl_pct: float | None = None
+        if pnl_pct_rows:
+            values = [
+                json.loads(row["value"]).get("pnl_pct")
+                for row in pnl_pct_rows
+                if json.loads(row["value"]).get("pnl_pct") is not None
+            ]
+            if values:
+                current_pnl_pct = round(min(values), 4)
+
+        if current_pnl_pct is None:
+            cb_status = "unknown"
+        elif current_pnl_pct <= cb_threshold:
+            cb_status = "tripped"
+        elif current_pnl_pct <= cb_threshold + 1.0:
+            cb_status = "warning"
+        else:
+            cb_status = "ok"
+
         return {
             "date": today,
             "markets": market_status,
@@ -87,6 +117,11 @@ def create_dashboard_app(db_path: str) -> FastAPI:
                 "total_pnl": round(total_pnl, 2),
                 "decision_count": total_decisions,
             },
+            "circuit_breaker": {
+                "threshold_pct": cb_threshold,
+                "current_pnl_pct": current_pnl_pct,
+                "status": cb_status,
+            },
         }
 
     @app.get("/api/playbook/{date_str}")
```
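With this change the status endpoint gains a `circuit_breaker` object. An illustrative shape, with made-up values, assuming a -3.0% threshold:

```python
# Illustrative shape of the new "circuit_breaker" field in /api/status (values made up).
circuit_breaker = {
    "threshold_pct": -3.0,
    "current_pnl_pct": -2.4,  # min pnl_pct across the portfolio_pnl_pct_* rows
    "status": "warning",      # one of: ok / warning / tripped / unknown
}
# "warning" because -2.4 <= -3.0 + 1.0; it becomes "tripped" at or below -3.0.
```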
```diff
@@ -13,6 +13,7 @@
       --muted: #9fb3c8;
       --accent: #3cb371;
       --red: #e05555;
+      --warn: #e8a040;
       --border: #28455f;
     }
     * { box-sizing: border-box; margin: 0; padding: 0; }
@@ -43,6 +44,25 @@
     }
     .refresh-btn:hover { border-color: var(--accent); color: var(--accent); }
 
+    /* CB Gauge */
+    .cb-gauge-wrap {
+      display: flex; align-items: center; gap: 8px;
+      font-size: 11px; color: var(--muted);
+    }
+    .cb-dot {
+      width: 8px; height: 8px; border-radius: 50%; flex-shrink: 0;
+    }
+    .cb-dot.ok { background: var(--accent); }
+    .cb-dot.warning { background: var(--warn); animation: pulse-warn 1.2s ease-in-out infinite; }
+    .cb-dot.tripped { background: var(--red); animation: pulse-warn 0.6s ease-in-out infinite; }
+    .cb-dot.unknown { background: var(--border); }
+    @keyframes pulse-warn {
+      0%, 100% { opacity: 1; }
+      50% { opacity: 0.35; }
+    }
+    .cb-bar-wrap { width: 64px; height: 5px; background: rgba(255,255,255,0.08); border-radius: 3px; overflow: hidden; }
+    .cb-bar-fill { height: 100%; border-radius: 3px; transition: width 0.4s, background 0.4s; }
+
     /* Summary cards */
     .cards { display: grid; grid-template-columns: repeat(4, 1fr); gap: 12px; margin-bottom: 20px; }
     @media (max-width: 700px) { .cards { grid-template-columns: repeat(2, 1fr); } }
@@ -152,6 +172,51 @@
     /* Spinner */
     .spinner { display: inline-block; width: 12px; height: 12px; border: 2px solid var(--border); border-top-color: var(--accent); border-radius: 50%; animation: spin 0.8s linear infinite; }
     @keyframes spin { to { transform: rotate(360deg); } }
+
+    /* Generic panel */
+    .panel {
+      background: var(--panel);
+      border: 1px solid var(--border);
+      border-radius: 10px;
+      padding: 16px;
+      margin-top: 20px;
+    }
+
+    /* Playbook panel - details/summary accordion */
+    .playbook-panel details { border: 1px solid var(--border); border-radius: 4px; margin-bottom: 6px; }
+    .playbook-panel summary { padding: 8px 12px; cursor: pointer; font-weight: 600; background: var(--bg); color: var(--fg); }
+    .playbook-panel summary:hover { color: var(--accent); }
+    .playbook-panel pre { margin: 0; padding: 12px; background: var(--bg); overflow-x: auto;
+      font-size: 11px; color: #a0c4ff; white-space: pre-wrap; }
+
+    /* Scorecard KPI card grid */
+    .scorecard-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(140px, 1fr)); gap: 10px; }
+    .kpi-card { background: var(--bg); border: 1px solid var(--border); border-radius: 6px; padding: 12px; text-align: center; }
+    .kpi-card .kpi-label { font-size: 11px; color: var(--muted); margin-bottom: 4px; }
+    .kpi-card .kpi-value { font-size: 20px; font-weight: 700; color: var(--fg); }
+
+    /* Scenarios table */
+    .scenarios-table { width: 100%; border-collapse: collapse; font-size: 13px; }
+    .scenarios-table th { background: var(--bg); padding: 8px; text-align: left; border-bottom: 1px solid var(--border);
+      color: var(--muted); font-size: 11px; font-weight: 600; white-space: nowrap; }
+    .scenarios-table td { padding: 7px 8px; border-bottom: 1px solid rgba(40,69,95,0.5); }
+    .scenarios-table tr:hover td { background: rgba(255,255,255,0.02); }
+
+    /* Context table */
+    .context-table { width: 100%; border-collapse: collapse; font-size: 12px; }
+    .context-table th { background: var(--bg); padding: 8px; text-align: left; border-bottom: 1px solid var(--border);
+      color: var(--muted); font-size: 11px; font-weight: 600; white-space: nowrap; }
+    .context-table td { padding: 6px 8px; border-bottom: 1px solid rgba(40,69,95,0.5); vertical-align: top; }
+    .context-value { max-height: 60px; overflow-y: auto; color: #a0c4ff; word-break: break-all; }
+
+    /* Common panel select controls */
+    .panel-controls { display: flex; gap: 8px; align-items: center; flex-wrap: wrap; }
+    .panel-controls select, .panel-controls input[type="number"] {
+      background: var(--bg); color: var(--fg); border: 1px solid var(--border);
+      border-radius: 4px; padding: 4px 8px; font-size: 13px; font-family: inherit;
+    }
+    .panel-date { color: var(--muted); font-size: 12px; }
+    .empty-msg { color: var(--muted); text-align: center; padding: 20px 0; font-size: 12px; }
   </style>
 </head>
 <body>
```
```diff
@@ -160,6 +225,13 @@
   <header>
     <h1>🐍 The Ouroboros</h1>
     <div class="header-right">
+      <div class="cb-gauge-wrap" id="cb-gauge" title="Circuit Breaker">
+        <span class="cb-dot unknown" id="cb-dot"></span>
+        <span id="cb-label">CB --</span>
+        <div class="cb-bar-wrap">
+          <div class="cb-bar-fill" id="cb-bar" style="width:0%;background:var(--accent)"></div>
+        </div>
+      </div>
       <span id="last-updated">--</span>
       <button class="refresh-btn" onclick="refreshAll()">↺ 새로고침</button>
     </div>
@@ -256,6 +328,72 @@
         </tbody>
       </table>
     </div>
+
+    <!-- playbook panel -->
+    <div class="panel playbook-panel">
+      <div class="panel-header">
+        <span class="panel-title">📋 프리마켓 플레이북</span>
+        <div class="panel-controls">
+          <select id="pb-market-select" onchange="fetchPlaybook()">
+            <option value="KR">KR</option>
+            <option value="US_NASDAQ">US_NASDAQ</option>
+            <option value="US_NYSE">US_NYSE</option>
+          </select>
+          <span id="pb-date" class="panel-date"></span>
+        </div>
+      </div>
+      <div id="playbook-content"><p class="empty-msg">데이터 없음</p></div>
+    </div>
+
+    <!-- scorecard panel -->
+    <div class="panel">
+      <div class="panel-header">
+        <span class="panel-title">📊 일간 스코어카드</span>
+        <div class="panel-controls">
+          <select id="sc-market-select" onchange="fetchScorecard()">
+            <option value="KR">KR</option>
+            <option value="US_NASDAQ">US_NASDAQ</option>
+          </select>
+          <span id="sc-date" class="panel-date"></span>
+        </div>
+      </div>
+      <div id="scorecard-grid" class="scorecard-grid"><p class="empty-msg">데이터 없음</p></div>
+    </div>
+
+    <!-- scenarios panel -->
+    <div class="panel">
+      <div class="panel-header">
+        <span class="panel-title">🎯 활성 시나리오 매칭</span>
+        <div class="panel-controls">
+          <select id="scen-market-select" onchange="fetchScenarios()">
+            <option value="KR">KR</option>
+            <option value="US_NASDAQ">US_NASDAQ</option>
+          </select>
+        </div>
+      </div>
+      <div id="scenarios-content"><p class="empty-msg">데이터 없음</p></div>
+    </div>
+
+    <!-- context layer panel -->
+    <div class="panel">
+      <div class="panel-header">
+        <span class="panel-title">🧠 컨텍스트 트리</span>
+        <div class="panel-controls">
+          <select id="ctx-layer-select" onchange="fetchContext()">
+            <option value="L7_REALTIME">L7_REALTIME</option>
+            <option value="L6_DAILY">L6_DAILY</option>
+            <option value="L5_WEEKLY">L5_WEEKLY</option>
+            <option value="L4_MONTHLY">L4_MONTHLY</option>
+            <option value="L3_QUARTERLY">L3_QUARTERLY</option>
+            <option value="L2_YEARLY">L2_YEARLY</option>
+            <option value="L1_LIFETIME">L1_LIFETIME</option>
+          </select>
+          <input id="ctx-limit" type="number" value="20" min="1" max="200"
+            style="width:60px;" onchange="fetchContext()">
+        </div>
+      </div>
+      <div id="context-content"><p class="empty-msg">데이터 없음</p></div>
+    </div>
   </div>
 
   <script>
```
```diff
@@ -325,6 +463,38 @@
       }
     }
 
+    function renderCbGauge(cb) {
+      if (!cb) return;
+      const dot = document.getElementById('cb-dot');
+      const label = document.getElementById('cb-label');
+      const bar = document.getElementById('cb-bar');
+
+      const status = cb.status || 'unknown';
+      const threshold = cb.threshold_pct ?? -3.0;
+      const current = cb.current_pnl_pct;
+
+      // dot color
+      dot.className = `cb-dot ${status}`;
+
+      // label
+      if (current !== null && current !== undefined) {
+        const sign = current > 0 ? '+' : '';
+        label.textContent = `CB ${sign}${current.toFixed(2)}%`;
+      } else {
+        label.textContent = 'CB --';
+      }
+
+      // bar: fill = how much of the threshold has been consumed (0%=safe, 100%=tripped)
+      const colorMap = { ok: 'var(--accent)', warning: 'var(--warn)', tripped: 'var(--red)', unknown: 'var(--border)' };
+      bar.style.background = colorMap[status] || 'var(--border)';
+      if (current !== null && current !== undefined && threshold < 0) {
+        const fillPct = Math.min(Math.max((current / threshold) * 100, 0), 100);
+        bar.style.width = `${fillPct}%`;
+      } else {
+        bar.style.width = '0%';
+      }
+    }
+
     async function fetchStatus() {
       try {
         const r = await fetch('/api/status');
@@ -341,6 +511,7 @@
           pnlEl.className = `card-value ${n > 0 ? 'positive' : n < 0 ? 'negative' : 'neutral'}`;
         }
         document.getElementById('card-pnl-sub').textContent = `결정 ${t.decision_count ?? 0}건`;
+        renderCbGauge(d.circuit_breaker);
       } catch {}
     }
@@ -461,6 +632,117 @@
       fetchDecisions(currentMarket);
     }
 
+    function todayStr() {
+      return new Date().toISOString().slice(0, 10);
+    }
+
+    function esc(s) {
+      return String(s ?? '').replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;');
+    }
+
+    async function fetchJSON(url) {
+      const r = await fetch(url);
+      if (!r.ok) throw new Error(`HTTP ${r.status}`);
+      return r.json();
+    }
+
+    async function fetchPlaybook() {
+      const market = document.getElementById('pb-market-select').value;
+      const date = todayStr();
+      document.getElementById('pb-date').textContent = date;
+      const el = document.getElementById('playbook-content');
+      try {
+        const data = await fetchJSON(`/api/playbook/${date}?market=${market}`);
+        const stocks = data.stock_playbooks ?? [];
+        if (stocks.length === 0) {
+          el.innerHTML = '<p class="empty-msg">오늘 플레이북 없음</p>';
+          return;
+        }
+        el.innerHTML = stocks.map(sp =>
+          `<details><summary>${esc(sp.stock_code ?? '?')} — ${esc(sp.signal ?? '')}</summary>` +
+          `<pre>${esc(JSON.stringify(sp, null, 2))}</pre></details>`
+        ).join('');
+      } catch {
+        el.innerHTML = '<p class="empty-msg">플레이북 없음 (오늘 미생성 또는 API 오류)</p>';
+      }
+    }
+
+    async function fetchScorecard() {
+      const market = document.getElementById('sc-market-select').value;
+      const date = todayStr();
+      document.getElementById('sc-date').textContent = date;
+      const el = document.getElementById('scorecard-grid');
+      try {
+        const data = await fetchJSON(`/api/scorecard/${date}?market=${market}`);
+        const sc = data.scorecard ?? {};
+        const entries = Object.entries(sc);
+        if (entries.length === 0) {
+          el.innerHTML = '<p class="empty-msg">스코어카드 없음</p>';
+          return;
+        }
+        el.className = 'scorecard-grid';
+        el.innerHTML = entries.map(([k, v]) => `
+          <div class="kpi-card">
+            <div class="kpi-label">${esc(k)}</div>
+            <div class="kpi-value">${typeof v === 'number' ? v.toFixed(2) : esc(String(v))}</div>
+          </div>`).join('');
+      } catch {
+        el.innerHTML = '<p class="empty-msg">스코어카드 없음 (오늘 미생성 또는 API 오류)</p>';
+      }
+    }
+
+    async function fetchScenarios() {
+      const market = document.getElementById('scen-market-select').value;
+      const date = todayStr();
+      const el = document.getElementById('scenarios-content');
+      try {
+        const data = await fetchJSON(`/api/scenarios/active?market=${market}&date_str=${date}&limit=50`);
+        const matches = data.matches ?? [];
+        if (matches.length === 0) {
+          el.innerHTML = '<p class="empty-msg">활성 시나리오 없음</p>';
+          return;
+        }
+        el.innerHTML = `<table class="scenarios-table">
+          <thead><tr><th>종목</th><th>신호</th><th>신뢰도</th><th>매칭 조건</th></tr></thead>
+          <tbody>${matches.map(m => `
+            <tr>
+              <td>${esc(m.stock_code)}</td>
+              <td>${esc(m.signal ?? '-')}</td>
+              <td>${esc(m.confidence ?? '-')}</td>
+              <td><code style="font-size:11px">${esc(JSON.stringify(m.scenario_match ?? {}))}</code></td>
+            </tr>`).join('')}
+          </tbody></table>`;
+      } catch {
+        el.innerHTML = '<p class="empty-msg">데이터 없음</p>';
+      }
+    }
+
+    async function fetchContext() {
+      const layer = document.getElementById('ctx-layer-select').value;
+      const limit = Math.min(Math.max(parseInt(document.getElementById('ctx-limit').value, 10) || 20, 1), 200);
+      const el = document.getElementById('context-content');
+      try {
+        const data = await fetchJSON(`/api/context/${layer}?limit=${limit}`);
+        const entries = data.entries ?? [];
+        if (entries.length === 0) {
+          el.innerHTML = '<p class="empty-msg">컨텍스트 없음</p>';
+          return;
+        }
+        el.innerHTML = `<table class="context-table">
+          <thead><tr><th>timeframe</th><th>key</th><th>value</th><th>updated</th></tr></thead>
+          <tbody>${entries.map(e => `
+            <tr>
+              <td>${esc(e.timeframe)}</td>
+              <td>${esc(e.key)}</td>
+              <td><div class="context-value">${esc(JSON.stringify(e.value ?? e.raw_value))}</div></td>
+              <td style="font-size:11px;color:var(--muted)">${esc((e.updated_at ?? '').slice(0, 16))}</td>
+            </tr>`).join('')}
+          </tbody></table>`;
+      } catch {
+        el.innerHTML = '<p class="empty-msg">데이터 없음</p>';
+      }
+    }
+
     async function refreshAll() {
       document.getElementById('last-updated').textContent = '업데이트 중...';
       await Promise.all([
@@ -469,6 +751,10 @@
         fetchPositions(),
         fetchPnlHistory(currentDays),
         fetchDecisions(currentMarket),
+        fetchPlaybook(),
+        fetchScorecard(),
+        fetchScenarios(),
+        fetchContext(),
       ]);
       const now = new Date();
       const timeStr = now.toLocaleTimeString('ko-KR', { hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false });
```
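To inspect the data the new gauge and panels consume, the status endpoint can be queried directly. A minimal check, assuming the dashboard is running on the host and port suggested in `.env.example`:

```python
# Minimal check of the status endpoint the CB gauge reads (host/port taken from .env.example).
import json
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:8080/api/status") as resp:
    status = json.load(resp)

# e.g. {'threshold_pct': -3.0, 'current_pnl_pct': None, 'status': 'unknown'}
print(status.get("circuit_breaker"))
```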
src/db.py (33 changed lines)

```diff
@@ -14,6 +14,11 @@ def init_db(db_path: str) -> sqlite3.Connection:
     if db_path != ":memory:":
         Path(db_path).parent.mkdir(parents=True, exist_ok=True)
     conn = sqlite3.connect(db_path)
+    # Enable WAL mode for concurrent read/write (dashboard + trading loop).
+    # WAL does not apply to in-memory databases.
+    if db_path != ":memory:":
+        conn.execute("PRAGMA journal_mode=WAL")
+        conn.execute("PRAGMA busy_timeout=5000")
     conn.execute(
         """
         CREATE TABLE IF NOT EXISTS trades (
@@ -28,12 +33,13 @@ def init_db(db_path: str) -> sqlite3.Connection:
             pnl REAL DEFAULT 0.0,
             market TEXT DEFAULT 'KR',
             exchange_code TEXT DEFAULT 'KRX',
-            decision_id TEXT
+            decision_id TEXT,
+            mode TEXT DEFAULT 'paper'
         )
         """
     )
 
-    # Migration: Add market and exchange_code columns if they don't exist
+    # Migration: Add columns if they don't exist (backward-compatible schema upgrades)
     cursor = conn.execute("PRAGMA table_info(trades)")
     columns = {row[1] for row in cursor.fetchall()}
 
@@ -45,6 +51,8 @@ def init_db(db_path: str) -> sqlite3.Connection:
         conn.execute("ALTER TABLE trades ADD COLUMN selection_context TEXT")
     if "decision_id" not in columns:
        conn.execute("ALTER TABLE trades ADD COLUMN decision_id TEXT")
+    if "mode" not in columns:
+        conn.execute("ALTER TABLE trades ADD COLUMN mode TEXT DEFAULT 'paper'")
 
     # Context tree tables for multi-layered memory management
     conn.execute(
@@ -138,6 +146,18 @@ def init_db(db_path: str) -> sqlite3.Connection:
         " ON trades (stock_code, market, timestamp DESC)"
     )
 
+    # Lightweight key-value store for trading system runtime metrics (dashboard use only).
+    # Intentionally separate from the AI context tree to preserve separation of concerns.
+    conn.execute(
+        """
+        CREATE TABLE IF NOT EXISTS system_metrics (
+            key TEXT PRIMARY KEY,
+            value TEXT NOT NULL,
+            updated_at TEXT NOT NULL
+        )
+        """
+    )
+
     conn.commit()
     return conn
 
@@ -155,6 +175,7 @@ def log_trade(
     exchange_code: str = "KRX",
     selection_context: dict[str, any] | None = None,
     decision_id: str | None = None,
+    mode: str = "paper",
 ) -> None:
     """Insert a trade record into the database.
 
@@ -170,6 +191,8 @@ def log_trade(
         market: Market code
         exchange_code: Exchange code
         selection_context: Scanner selection data (RSI, volume_ratio, signal, score)
+        decision_id: Unique decision identifier for audit linking
+        mode: Trading mode ('paper' or 'live') for data separation
     """
     # Serialize selection context to JSON
     context_json = json.dumps(selection_context) if selection_context else None
@@ -178,9 +201,10 @@ def log_trade(
         """
         INSERT INTO trades (
            timestamp, stock_code, action, confidence, rationale,
-            quantity, price, pnl, market, exchange_code, selection_context, decision_id
+            quantity, price, pnl, market, exchange_code, selection_context, decision_id,
+            mode
        )
-        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
         """,
        (
            datetime.now(UTC).isoformat(),
@@ -195,6 +219,7 @@ def log_trade(
            exchange_code,
            context_json,
            decision_id,
+            mode,
        ),
    )
    conn.commit()
```
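The `system_metrics` table is a plain key-value store, so the write in the trading loop and the dashboard read are a simple upsert/select round trip. A self-contained sketch against an in-memory database, with the DDL copied from the migration above:

```python
import json
import sqlite3
from datetime import UTC, datetime

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute(
    "CREATE TABLE system_metrics (key TEXT PRIMARY KEY, value TEXT NOT NULL, updated_at TEXT NOT NULL)"
)

# Upsert, as trading_cycle does once per market.
conn.execute(
    "INSERT OR REPLACE INTO system_metrics (key, value, updated_at) VALUES (?, ?, ?)",
    ("portfolio_pnl_pct_KR", json.dumps({"pnl_pct": -1.2345}), datetime.now(UTC).isoformat()),
)

# Read back, as the dashboard status endpoint does.
rows = conn.execute(
    "SELECT key, value FROM system_metrics WHERE key LIKE 'portfolio_pnl_pct_%'"
).fetchall()
print([(r["key"], json.loads(r["value"])["pnl_pct"]) for r in rows])  # [('portfolio_pnl_pct_KR', -1.2345)]
```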
src/main.py (316 changed lines)

```diff
@@ -40,7 +40,7 @@ from src.evolution.daily_review import DailyReviewer
 from src.evolution.optimizer import EvolutionOptimizer
 from src.logging.decision_logger import DecisionLogger
 from src.logging_config import setup_logging
-from src.markets.schedule import MarketInfo, get_next_market_open, get_open_markets
+from src.markets.schedule import MARKETS, MarketInfo, get_next_market_open, get_open_markets
 from src.notifications.telegram_client import NotificationFilter, TelegramClient, TelegramCommandHandler
 from src.strategy.models import DayPlaybook, MarketOutlook
 from src.strategy.playbook_store import PlaybookStore
@@ -88,6 +88,129 @@ DAILY_TRADE_SESSIONS = 4  # Number of trading sessions per day
 TRADE_SESSION_INTERVAL_HOURS = 6  # Hours between sessions
 
 
+async def _retry_connection(coro_factory: Any, *args: Any, label: str = "", **kwargs: Any) -> Any:
+    """Call an async function retrying on ConnectionError with exponential backoff.
+
+    Retries up to MAX_CONNECTION_RETRIES times (exclusive of the first attempt),
+    sleeping 2^attempt seconds between attempts. Use only for idempotent read
+    operations — never for order submission.
+
+    Args:
+        coro_factory: Async callable (method or function) to invoke.
+        *args: Positional arguments forwarded to coro_factory.
+        label: Human-readable label for log messages.
+        **kwargs: Keyword arguments forwarded to coro_factory.
+
+    Raises:
+        ConnectionError: If all retries are exhausted.
+    """
+    for attempt in range(1, MAX_CONNECTION_RETRIES + 1):
+        try:
+            return await coro_factory(*args, **kwargs)
+        except ConnectionError as exc:
+            if attempt < MAX_CONNECTION_RETRIES:
+                wait_secs = 2 ** attempt
+                logger.warning(
+                    "Connection error %s (attempt %d/%d), retrying in %ds: %s",
+                    label,
+                    attempt,
+                    MAX_CONNECTION_RETRIES,
+                    wait_secs,
+                    exc,
+                )
+                await asyncio.sleep(wait_secs)
+            else:
+                logger.error(
+                    "Connection error %s — all %d retries exhausted: %s",
+                    label,
+                    MAX_CONNECTION_RETRIES,
+                    exc,
+                )
+                raise
+
+
+async def sync_positions_from_broker(
+    broker: Any,
+    overseas_broker: Any,
+    db_conn: Any,
+    settings: "Settings",
+) -> int:
+    """Sync open positions from the live broker into the local DB at startup.
+
+    Fetches current holdings from the broker for all configured markets and
+    inserts a synthetic BUY record for any position that the DB does not
+    already know about. This prevents double-buy when positions were opened
+    in a previous session or entered manually outside the system.
+
+    Returns:
+        Number of new positions synced.
+    """
+    synced = 0
+    seen_exchange_codes: set[str] = set()
+
+    for market_code in settings.enabled_market_list:
+        market = MARKETS.get(market_code)
+        if market is None:
+            continue
+
+        try:
+            if market.is_domestic:
+                balance_data = await broker.get_balance()
+                log_market = market_code  # "KR"
+            else:
+                if market.exchange_code in seen_exchange_codes:
+                    continue
+                seen_exchange_codes.add(market.exchange_code)
+                balance_data = await overseas_broker.get_overseas_balance(
+                    market.exchange_code
+                )
+                log_market = market_code  # e.g. "US_NASDAQ"
+        except ConnectionError as exc:
+            logger.warning(
+                "Startup sync: balance fetch failed for %s — skipping: %s",
+                market_code,
+                exc,
+            )
+            continue
+
+        held_codes = _extract_held_codes_from_balance(
+            balance_data, is_domestic=market.is_domestic
+        )
+        for stock_code in held_codes:
+            if get_open_position(db_conn, stock_code, log_market):
+                continue  # already tracked
+            qty = _extract_held_qty_from_balance(
+                balance_data, stock_code, is_domestic=market.is_domestic
+            )
+            log_trade(
+                conn=db_conn,
+                stock_code=stock_code,
+                action="BUY",
+                confidence=0,
+                rationale="[startup-sync] Position detected from broker at startup",
+                quantity=qty,
+                price=0.0,
+                market=log_market,
+                exchange_code=market.exchange_code,
+                mode=settings.MODE,
+            )
+            logger.info(
+                "Startup sync: %s/%s recorded as open position (qty=%d)",
+                log_market,
+                stock_code,
+                qty,
+            )
+            synced += 1
+
+    if synced:
+        logger.info(
+            "Startup sync complete: %d position(s) synced from broker", synced
+        )
+    else:
+        logger.info("Startup sync: no new positions to sync from broker")
+    return synced
+
+
 def _extract_symbol_from_holding(item: dict[str, Any]) -> str:
     """Extract symbol from overseas holding payload variants."""
     for key in (
@@ -340,7 +463,13 @@ async def trading_cycle(
     purchase_total = safe_float(balance_info.get("frcr_buy_amt_smtl", "0") or "0")
 
     # Paper mode fallback: VTS overseas balance API often fails for many accounts.
-    if total_cash <= 0 and settings and settings.PAPER_OVERSEAS_CASH > 0:
+    # Only activate in paper mode — live mode must use real balance from KIS.
+    if (
+        total_cash <= 0
+        and settings
+        and settings.MODE == "paper"
+        and settings.PAPER_OVERSEAS_CASH > 0
+    ):
         logger.debug(
             "Overseas cash balance is 0 for %s; using paper fallback %.2f USD",
             market.exchange_code,
@@ -430,6 +559,17 @@ async def trading_cycle(
         {"volume_ratio": candidate.volume_ratio},
     )
 
+    # Write pnl_pct to system_metrics (dashboard-only table, separate from the AI context tree)
+    db_conn.execute(
+        "INSERT OR REPLACE INTO system_metrics (key, value, updated_at) VALUES (?, ?, ?)",
+        (
+            f"portfolio_pnl_pct_{market.code}",
+            json.dumps({"pnl_pct": round(pnl_pct, 4)}),
+            datetime.now(UTC).isoformat(),
+        ),
+    )
+    db_conn.commit()
+
     # Build portfolio data for global rule evaluation
     portfolio_data = {
         "portfolio_pnl_pct": pnl_pct,
@@ -513,6 +653,14 @@ async def trading_cycle(
     # Check existing position before a BUY decision (prevents duplicate buys)
     if decision.action == "BUY":
         existing_position = get_open_position(db_conn, stock_code, market.code)
+        if not existing_position:
+            # An accepted-but-unfilled SELL limit order leaves the DB marked closed while
+            # the broker still holds it; use the live broker balance as the authoritative source.
+            broker_qty = _extract_held_qty_from_balance(
+                balance_data, stock_code, is_domestic=market.is_domestic
+            )
+            if broker_qty > 0:
+                existing_position = {"price": 0.0, "quantity": broker_qty}
         if existing_position:
             decision = TradeDecision(
                 action="HOLD",
@@ -712,21 +860,23 @@ async def trading_cycle(
                     price=0,  # market order
                 )
             else:
-                # For overseas orders:
-                # - KIS VTS only accepts limit orders (지정가만 가능)
-                # - BUY: use 0.5% premium over last price to improve fill probability
-                #   (ask price is typically slightly above last, and VTS won't fill below ask)
-                # - SELL: use last price as the limit
+                # For overseas orders, always use limit orders (지정가):
+                # - KIS market orders (ORD_DVSN=01) calculate quantity based on upper limit
+                #   price (상한가 기준), resulting in only 60-80% of intended cash being used.
+                # - BUY: +0.2% above last price — tight enough to minimise overpayment while
+                #   achieving >90% fill rate on large-cap US stocks.
+                # - SELL: -0.2% below last price — ensures fill even when price dips slightly
+                #   (placing at exact last price risks no-fill if the bid is just below).
                 if decision.action == "BUY":
-                    order_price = round(current_price * 1.005, 4)
+                    order_price = round(current_price * 1.002, 4)
                 else:
-                    order_price = current_price
+                    order_price = round(current_price * 0.998, 4)
                 result = await overseas_broker.send_overseas_order(
                     exchange_code=market.exchange_code,
                     stock_code=stock_code,
                     order_type=decision.action,
                     quantity=quantity,
-                    price=order_price,  # limit order — KIS VTS rejects market orders
+                    price=order_price,  # limit order
                 )
                 # Check if KIS rejected the order (rt_cd != "0")
                 if result.get("rt_cd", "") != "0":
```
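As a quick arithmetic check on the new limit pricing, a last price of 190.00 USD gives the following BUY and SELL limits (values are illustrative):

```python
# Worked example of the +/-0.2% overseas limit pricing (illustrative prices).
current_price = 190.00
buy_limit = round(current_price * 1.002, 4)   # 190.38, slightly above last so the buy fills
sell_limit = round(current_price * 0.998, 4)  # 189.62, slightly below last so the sell fills
print(buy_limit, sell_limit)
```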
```diff
@@ -803,6 +953,7 @@ async def trading_cycle(
         exchange_code=market.exchange_code,
         selection_context=selection_context,
         decision_id=decision_id,
+        mode=settings.MODE if settings else "paper",
     )
 
     # 7. Latency monitoring
@@ -841,18 +992,30 @@ async def run_daily_session(
     telegram: TelegramClient,
     settings: Settings,
     smart_scanner: SmartVolatilityScanner | None = None,
-) -> None:
+    daily_start_eval: float = 0.0,
+) -> float:
     """Execute one daily trading session.
 
     V2 proactive strategy: 1 Gemini call for playbook generation,
     then local scenario evaluation per stock (0 API calls).
+
+    Args:
+        daily_start_eval: Portfolio evaluation at the start of the trading day.
+            Used to compute intra-day P&L for the Circuit Breaker.
+            Pass 0.0 on the first session of each day; the function will set
+            it from the first balance query and return it for subsequent
+            sessions.
+
+    Returns:
+        The daily_start_eval value that should be forwarded to the next
+        session of the same trading day.
     """
     # Get currently open markets
     open_markets = get_open_markets(settings.enabled_market_list)
 
     if not open_markets:
         logger.info("No markets open for this session")
-        return
+        return daily_start_eval
 
     logger.info("Starting daily trading session for %d markets", len(open_markets))
 
@@ -938,11 +1101,18 @@ async def run_daily_session(
             try:
                 if market.is_domestic:
                     current_price, price_change_pct, foreigner_net = (
-                        await broker.get_current_price(stock_code)
+                        await _retry_connection(
+                            broker.get_current_price,
+                            stock_code,
+                            label=stock_code,
+                        )
                     )
                 else:
-                    price_data = await overseas_broker.get_overseas_price(
-                        market.exchange_code, stock_code
+                    price_data = await _retry_connection(
+                        overseas_broker.get_overseas_price,
+                        market.exchange_code,
+                        stock_code,
+                        label=f"{stock_code}@{market.exchange_code}",
                     )
                     current_price = safe_float(
                         price_data.get("output", {}).get("last", "0")
@@ -993,9 +1163,27 @@ async def run_daily_session(
             logger.warning("No valid stock data for market %s", market.code)
             continue
 
-        # Get balance data once for the market
+        # Get balance data once for the market (read-only — safe to retry)
+        try:
+            if market.is_domestic:
+                balance_data = await _retry_connection(
+                    broker.get_balance, label=f"balance:{market.code}"
+                )
+            else:
+                balance_data = await _retry_connection(
+                    overseas_broker.get_overseas_balance,
+                    market.exchange_code,
+                    label=f"overseas_balance:{market.exchange_code}",
+                )
+        except ConnectionError as exc:
+            logger.error(
+                "Balance fetch failed for market %s after all retries — skipping market: %s",
+                market.code,
+                exc,
+            )
+            continue
+
         if market.is_domestic:
-            balance_data = await broker.get_balance()
             output2 = balance_data.get("output2", [{}])
             total_eval = safe_float(
                 output2[0].get("tot_evlu_amt", "0")
@@ -1007,7 +1195,6 @@ async def run_daily_session(
                 output2[0].get("pchs_amt_smtl_amt", "0")
             ) if output2 else 0
         else:
-            balance_data = await overseas_broker.get_overseas_balance(market.exchange_code)
             output2 = balance_data.get("output2", [{}])
             if isinstance(output2, list) and output2:
                 balance_info = output2[0]
@@ -1022,19 +1209,35 @@ async def run_daily_session(
                     balance_info.get("frcr_buy_amt_smtl", "0") or "0"
                 )
                 # Paper mode fallback: VTS overseas balance API often fails for many accounts.
-                if total_cash <= 0 and settings.PAPER_OVERSEAS_CASH > 0:
+                # Only activate in paper mode — live mode must use real balance from KIS.
+                if (
+                    total_cash <= 0
+                    and settings.MODE == "paper"
+                    and settings.PAPER_OVERSEAS_CASH > 0
+                ):
                     total_cash = settings.PAPER_OVERSEAS_CASH
 
-        # VTS overseas balance API often returns 0; use paper fallback.
-        if total_cash <= 0 and settings.PAPER_OVERSEAS_CASH > 0:
-            total_cash = settings.PAPER_OVERSEAS_CASH
+        # Capture the day's opening portfolio value on the first market processed
+        # in this session. Used to compute intra-day P&L for the CB instead of
+        # the cumulative purchase_total which spans the entire account history.
+        if daily_start_eval <= 0 and total_eval > 0:
+            daily_start_eval = total_eval
+            logger.info(
+                "Daily CB baseline set: total_eval=%.2f (first balance of the day)",
+                daily_start_eval,
+            )
 
-        # Calculate daily P&L %
-        pnl_pct = (
-            ((total_eval - purchase_total) / purchase_total * 100)
-            if purchase_total > 0
-            else 0.0
-        )
+        # Daily P&L: compare current eval vs start-of-day eval.
+        # Falls back to purchase_total if daily_start_eval is unavailable (e.g. paper
+        # mode where balance API returns 0 for all values).
+        if daily_start_eval > 0:
+            pnl_pct = (total_eval - daily_start_eval) / daily_start_eval * 100
+        else:
+            pnl_pct = (
+                ((total_eval - purchase_total) / purchase_total * 100)
+                if purchase_total > 0
+                else 0.0
+            )
         portfolio_data = {
             "portfolio_pnl_pct": pnl_pct,
             "total_cash": total_cash,
@@ -1065,6 +1268,33 @@ async def run_daily_session(
                 decision.confidence,
             )
 
+            # Prevent duplicate BUYs using the broker balance (protects unfilled SELL limit orders)
+            if decision.action == "BUY":
+                daily_existing = get_open_position(db_conn, stock_code, market.code)
+                if not daily_existing:
+                    # An accepted-but-unfilled SELL limit order leaves the DB marked closed while
+                    # the broker still holds it; use the live broker balance as the authoritative source.
+                    broker_qty = _extract_held_qty_from_balance(
+                        balance_data, stock_code, is_domestic=market.is_domestic
+                    )
+                    if broker_qty > 0:
+                        daily_existing = {"price": 0.0, "quantity": broker_qty}
+                if daily_existing:
+                    decision = TradeDecision(
+                        action="HOLD",
+                        confidence=decision.confidence,
+                        rationale=(
+                            f"Already holding {stock_code} "
+                            f"(entry={daily_existing['price']:.4f}, "
+                            f"qty={daily_existing['quantity']})"
+                        ),
+                    )
+                    logger.info(
+                        "BUY suppressed for %s (%s): already holding open position",
+                        stock_code,
+                        market.name,
+                    )
+
             # Log decision
```
|
# Log decision
|
||||||
context_snapshot = {
|
context_snapshot = {
|
||||||
"L1": {
|
"L1": {
|
||||||
@@ -1272,9 +1502,11 @@ async def run_daily_session(
|
|||||||
market=market.code,
|
market=market.code,
|
||||||
exchange_code=market.exchange_code,
|
exchange_code=market.exchange_code,
|
||||||
decision_id=decision_id,
|
decision_id=decision_id,
|
||||||
|
mode=settings.MODE,
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.info("Daily trading session completed")
|
logger.info("Daily trading session completed")
|
||||||
|
return daily_start_eval
|
||||||
|
|
||||||
|
|
||||||
async def _handle_market_close(
|
async def _handle_market_close(
|
||||||
@@ -1892,6 +2124,12 @@ async def run(settings: Settings) -> None:
|
|||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.warning("System startup notification failed: %s", exc)
|
logger.warning("System startup notification failed: %s", exc)
|
||||||
|
|
||||||
|
# Sync broker positions → DB to prevent double-buy on restart
|
||||||
|
try:
|
||||||
|
await sync_positions_from_broker(broker, overseas_broker, db_conn, settings)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning("Startup position sync failed (non-fatal): %s", exc)
|
||||||
|
|
||||||
# Start command handler
|
# Start command handler
|
||||||
try:
|
try:
|
||||||
await command_handler.start_polling()
|
await command_handler.start_polling()
|
||||||
@@ -1910,13 +2148,26 @@ async def run(settings: Settings) -> None:
|
|||||||
|
|
||||||
session_interval = settings.SESSION_INTERVAL_HOURS * 3600 # Convert to seconds
|
session_interval = settings.SESSION_INTERVAL_HOURS * 3600 # Convert to seconds
|
||||||
|
|
||||||
|
# daily_start_eval: portfolio eval captured at the first session of each
|
||||||
|
# trading day. Reset on calendar-date change so the CB measures only
|
||||||
|
# today's drawdown, not cumulative account history.
|
||||||
|
_cb_daily_start_eval: float = 0.0
|
||||||
|
_cb_last_date: str = ""
|
||||||
|
|
||||||
while not shutdown.is_set():
|
while not shutdown.is_set():
|
||||||
# Wait for trading to be unpaused
|
# Wait for trading to be unpaused
|
||||||
await pause_trading.wait()
|
await pause_trading.wait()
|
||||||
_run_context_scheduler(context_scheduler, now=datetime.now(UTC))
|
_run_context_scheduler(context_scheduler, now=datetime.now(UTC))
|
||||||
|
|
||||||
|
# Reset intra-day CB baseline on a new calendar date
|
||||||
|
today_str = datetime.now(UTC).date().isoformat()
|
||||||
|
if today_str != _cb_last_date:
|
||||||
|
_cb_last_date = today_str
|
||||||
|
_cb_daily_start_eval = 0.0
|
||||||
|
logger.info("New trading day %s — daily CB baseline reset", today_str)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await run_daily_session(
|
_cb_daily_start_eval = await run_daily_session(
|
||||||
broker,
|
broker,
|
||||||
overseas_broker,
|
overseas_broker,
|
||||||
scenario_engine,
|
scenario_engine,
|
||||||
@@ -1930,9 +2181,14 @@ async def run(settings: Settings) -> None:
|
|||||||
telegram,
|
telegram,
|
||||||
settings,
|
settings,
|
||||||
smart_scanner=smart_scanner,
|
smart_scanner=smart_scanner,
|
||||||
|
daily_start_eval=_cb_daily_start_eval,
|
||||||
)
|
)
|
||||||
except CircuitBreakerTripped:
|
except CircuitBreakerTripped:
|
||||||
logger.critical("Circuit breaker tripped — shutting down")
|
logger.critical("Circuit breaker tripped — shutting down")
|
||||||
|
await telegram.notify_circuit_breaker(
|
||||||
|
pnl_pct=settings.CIRCUIT_BREAKER_PCT,
|
||||||
|
threshold=settings.CIRCUIT_BREAKER_PCT,
|
||||||
|
)
|
||||||
shutdown.set()
|
shutdown.set()
|
||||||
break
|
break
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
@@ -2250,6 +2506,8 @@ async def run(settings: Settings) -> None:
|
|||||||
except TimeoutError:
|
except TimeoutError:
|
||||||
pass # Normal — timeout means it's time for next cycle
|
pass # Normal — timeout means it's time for next cycle
|
||||||
finally:
|
finally:
|
||||||
|
# Notify shutdown before closing resources
|
||||||
|
await telegram.notify_system_shutdown("Normal shutdown")
|
||||||
# Clean up resources
|
# Clean up resources
|
||||||
await command_handler.stop_polling()
|
await command_handler.stop_polling()
|
||||||
await broker.close()
|
await broker.close()
|
||||||
|
|||||||
114
src/strategies/v20260220_210124_evolved.py
Normal file
114
src/strategies/v20260220_210124_evolved.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
"""Auto-generated strategy: v20260220_210124
|
||||||
|
|
||||||
|
Generated at: 2026-02-20T21:01:24.706847+00:00
|
||||||
|
Rationale: Auto-evolved from 6 failures. Primary failure markets: ['US_AMEX', 'US_NYSE', 'US_NASDAQ']. Average loss: -194.69
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any
|
||||||
|
from src.strategies.base import BaseStrategy
|
||||||
|
|
||||||
|
|
||||||
|
class Strategy_v20260220_210124(BaseStrategy):
|
||||||
|
"""Strategy: v20260220_210124"""
|
||||||
|
|
||||||
|
def evaluate(self, market_data: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# --- Strategy Constants ---
|
||||||
|
# Minimum price for a stock to be considered for trading (avoids penny stocks)
|
||||||
|
MIN_PRICE = 5.0
|
||||||
|
|
||||||
|
# Momentum signal thresholds (stricter than previous failures)
|
||||||
|
MOMENTUM_PRICE_CHANGE_THRESHOLD = 7.0 # % price change
|
||||||
|
MOMENTUM_VOLUME_RATIO_THRESHOLD = 4.0 # X times average volume
|
||||||
|
|
||||||
|
# Oversold signal thresholds (more conservative)
|
||||||
|
OVERSOLD_RSI_THRESHOLD = 25.0 # RSI value (lower means more oversold)
|
||||||
|
|
||||||
|
# Confidence levels
|
||||||
|
CONFIDENCE_HOLD = 30
|
||||||
|
CONFIDENCE_BUY_OVERSOLD = 65
|
||||||
|
CONFIDENCE_BUY_MOMENTUM = 85
|
||||||
|
CONFIDENCE_BUY_STRONG_MOMENTUM = 90 # For higher-priced stocks with strong momentum
|
||||||
|
|
||||||
|
# Market hours in UTC (9:30 AM ET to 4:00 PM ET)
|
||||||
|
MARKET_OPEN_UTC = datetime.time(14, 30)
|
||||||
|
MARKET_CLOSE_UTC = datetime.time(21, 0)
|
||||||
|
|
||||||
|
# Volatile periods within market hours (UTC) to avoid
|
||||||
|
# First hour after open (14:30 UTC - 15:30 UTC)
|
||||||
|
VOLATILE_OPEN_END_UTC = datetime.time(15, 30)
|
||||||
|
# Last 30 minutes before close (20:30 UTC - 21:00 UTC)
|
||||||
|
VOLATILE_CLOSE_START_UTC = datetime.time(20, 30)
|
||||||
|
|
||||||
|
current_price = market_data.get('current_price')
|
||||||
|
price_change_pct = market_data.get('price_change_pct')
|
||||||
|
volume_ratio = market_data.get('volume_ratio') # Assumed pre-computed indicator
|
||||||
|
rsi = market_data.get('rsi') # Assumed pre-computed indicator
|
||||||
|
timestamp_str = market_data.get('timestamp')
|
||||||
|
|
||||||
|
action = "HOLD"
|
||||||
|
confidence = CONFIDENCE_HOLD
|
||||||
|
rationale = "Initial HOLD: No clear signal or conditions not met."
|
||||||
|
|
||||||
|
# --- 1. Basic Data Validation ---
|
||||||
|
if current_price is None or price_change_pct is None:
|
||||||
|
return {"action": "HOLD", "confidence": CONFIDENCE_HOLD,
|
||||||
|
"rationale": "Insufficient core data (price or price change) to evaluate."}
|
||||||
|
|
||||||
|
# --- 2. Price Filter: Avoid low-priced/penny stocks ---
|
||||||
|
if current_price < MIN_PRICE:
|
||||||
|
return {"action": "HOLD", "confidence": CONFIDENCE_HOLD,
|
||||||
|
"rationale": f"Avoiding low-priced stock (${current_price:.2f} < ${MIN_PRICE:.2f})."}
|
||||||
|
|
||||||
|
# --- 3. Time Filter: Only trade during core market hours ---
|
||||||
|
if timestamp_str:
|
||||||
|
try:
|
||||||
|
dt_object = datetime.datetime.fromisoformat(timestamp_str)
|
||||||
|
current_time_utc = dt_object.time()
|
||||||
|
|
||||||
|
if not (MARKET_OPEN_UTC <= current_time_utc < MARKET_CLOSE_UTC):
|
||||||
|
return {"action": "HOLD", "confidence": CONFIDENCE_HOLD,
|
||||||
|
"rationale": f"Avoiding trade outside core market hours ({current_time_utc} UTC)."}
|
||||||
|
|
||||||
|
if (MARKET_OPEN_UTC <= current_time_utc < VOLATILE_OPEN_END_UTC) or \
|
||||||
|
(VOLATILE_CLOSE_START_UTC <= current_time_utc < MARKET_CLOSE_UTC):
|
||||||
|
return {"action": "HOLD", "confidence": CONFIDENCE_HOLD,
|
||||||
|
"rationale": f"Avoiding trade during volatile market open/close periods ({current_time_utc} UTC)."}
|
||||||
|
|
||||||
|
except ValueError:
|
||||||
|
rationale += " (Warning: Malformed timestamp, time filters skipped)"
|
||||||
|
|
||||||
|
# --- Initialize signal states ---
|
||||||
|
has_momentum_buy_signal = False
|
||||||
|
has_oversold_buy_signal = False
|
||||||
|
|
||||||
|
# --- 4. Evaluate Enhanced Buy Signals ---
|
||||||
|
|
||||||
|
# Momentum Buy Signal
|
||||||
|
if volume_ratio is not None and \
|
||||||
|
price_change_pct > MOMENTUM_PRICE_CHANGE_THRESHOLD and \
|
||||||
|
volume_ratio > MOMENTUM_VOLUME_RATIO_THRESHOLD:
|
||||||
|
has_momentum_buy_signal = True
|
||||||
|
rationale = f"Momentum BUY: Price change {price_change_pct:.2f}%, Volume {volume_ratio:.2f}x."
|
||||||
|
confidence = CONFIDENCE_BUY_MOMENTUM
|
||||||
|
if current_price >= 10.0:
|
||||||
|
confidence = CONFIDENCE_BUY_STRONG_MOMENTUM
|
||||||
|
|
||||||
|
# Oversold Buy Signal
|
||||||
|
if rsi is not None and rsi < OVERSOLD_RSI_THRESHOLD:
|
||||||
|
has_oversold_buy_signal = True
|
||||||
|
if not has_momentum_buy_signal:
|
||||||
|
rationale = f"Oversold BUY: RSI {rsi:.2f}."
|
||||||
|
confidence = CONFIDENCE_BUY_OVERSOLD
|
||||||
|
if current_price >= 10.0:
|
||||||
|
confidence = min(CONFIDENCE_BUY_OVERSOLD + 5, 80)
|
||||||
|
|
||||||
|
# --- 5. Decision Logic ---
|
||||||
|
if has_momentum_buy_signal:
|
||||||
|
action = "BUY"
|
||||||
|
elif has_oversold_buy_signal:
|
||||||
|
action = "BUY"
|
||||||
|
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
97
src/strategies/v20260220_210159_evolved.py
Normal file
97
src/strategies/v20260220_210159_evolved.py
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
"""Auto-generated strategy: v20260220_210159
|
||||||
|
|
||||||
|
Generated at: 2026-02-20T21:01:59.391523+00:00
|
||||||
|
Rationale: Auto-evolved from 6 failures. Primary failure markets: ['US_AMEX', 'US_NYSE', 'US_NASDAQ']. Average loss: -194.69
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any
|
||||||
|
from src.strategies.base import BaseStrategy
|
||||||
|
|
||||||
|
|
||||||
|
class Strategy_v20260220_210159(BaseStrategy):
|
||||||
|
"""Strategy: v20260220_210159"""
|
||||||
|
|
||||||
|
def evaluate(self, market_data: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
current_price = market_data.get('current_price')
|
||||||
|
price_change_pct = market_data.get('price_change_pct')
|
||||||
|
volume_ratio = market_data.get('volume_ratio')
|
||||||
|
rsi = market_data.get('rsi')
|
||||||
|
timestamp_str = market_data.get('timestamp')
|
||||||
|
market_name = market_data.get('market')
|
||||||
|
|
||||||
|
# Default action
|
||||||
|
action = "HOLD"
|
||||||
|
confidence = 0
|
||||||
|
rationale = "No strong signal or conditions not met."
|
||||||
|
|
||||||
|
# --- FAILURE PATTERN AVOIDANCE ---
|
||||||
|
|
||||||
|
# 1. Avoid low-priced/penny stocks
|
||||||
|
MIN_PRICE_THRESHOLD = 5.0 # USD
|
||||||
|
if current_price is not None and current_price < MIN_PRICE_THRESHOLD:
|
||||||
|
rationale = (
|
||||||
|
f"HOLD: Stock price (${current_price:.2f}) is below minimum threshold "
|
||||||
|
f"(${MIN_PRICE_THRESHOLD:.2f}). Past failures consistently involved low-priced stocks."
|
||||||
|
)
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
|
|
||||||
|
# 2. Avoid early market hour volatility
|
||||||
|
if timestamp_str:
|
||||||
|
try:
|
||||||
|
dt_obj = datetime.datetime.fromisoformat(timestamp_str)
|
||||||
|
utc_hour = dt_obj.hour
|
||||||
|
utc_minute = dt_obj.minute
|
||||||
|
|
||||||
|
if (utc_hour == 14 and utc_minute < 45) or (utc_hour == 13 and utc_minute >= 30):
|
||||||
|
rationale = (
|
||||||
|
f"HOLD: Trading during early market hours (UTC {utc_hour}:{utc_minute}), "
|
||||||
|
f"a period identified with past failures due to high volatility."
|
||||||
|
)
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# --- IMPROVED BUY STRATEGY ---
|
||||||
|
|
||||||
|
# Momentum BUY signal
|
||||||
|
if volume_ratio is not None and price_change_pct is not None:
|
||||||
|
if price_change_pct > 7.0 and volume_ratio > 3.0:
|
||||||
|
action = "BUY"
|
||||||
|
confidence = 70
|
||||||
|
rationale = "Improved BUY: Momentum signal with high volume and above price threshold."
|
||||||
|
|
||||||
|
if market_name == 'US_AMEX':
|
||||||
|
confidence = max(55, confidence - 5)
|
||||||
|
rationale += " (Adjusted lower for AMEX market's higher risk profile)."
|
||||||
|
elif market_name == 'US_NASDAQ' and price_change_pct > 20:
|
||||||
|
confidence = max(50, confidence - 10)
|
||||||
|
rationale += " (Adjusted lower for aggressive NASDAQ momentum volatility)."
|
||||||
|
|
||||||
|
if price_change_pct > 15.0:
|
||||||
|
confidence = max(50, confidence - 5)
|
||||||
|
rationale += " (Caution: Very high daily price change, potential for reversal)."
|
||||||
|
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
|
|
||||||
|
# Oversold BUY signal
|
||||||
|
if rsi is not None and price_change_pct is not None:
|
||||||
|
if rsi < 30 and price_change_pct < -3.0:
|
||||||
|
action = "BUY"
|
||||||
|
confidence = 65
|
||||||
|
rationale = "Improved BUY: Oversold signal with recent decline and above price threshold."
|
||||||
|
|
||||||
|
if market_name == 'US_AMEX':
|
||||||
|
confidence = max(50, confidence - 5)
|
||||||
|
rationale += " (Adjusted lower for AMEX market's higher risk on oversold assets)."
|
||||||
|
|
||||||
|
if price_change_pct < -10.0:
|
||||||
|
confidence = max(45, confidence - 10)
|
||||||
|
rationale += " (Caution: Very steep decline, potential falling knife)."
|
||||||
|
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
|
|
||||||
|
# If no specific BUY signal, default to HOLD
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
88
src/strategies/v20260220_210244_evolved.py
Normal file
88
src/strategies/v20260220_210244_evolved.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
"""Auto-generated strategy: v20260220_210244
|
||||||
|
|
||||||
|
Generated at: 2026-02-20T21:02:44.387355+00:00
|
||||||
|
Rationale: Auto-evolved from 6 failures. Primary failure markets: ['US_AMEX', 'US_NYSE', 'US_NASDAQ']. Average loss: -194.69
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any
|
||||||
|
from src.strategies.base import BaseStrategy
|
||||||
|
|
||||||
|
|
||||||
|
class Strategy_v20260220_210244(BaseStrategy):
|
||||||
|
"""Strategy: v20260220_210244"""
|
||||||
|
|
||||||
|
def evaluate(self, market_data: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Extract required data points safely
|
||||||
|
current_price = market_data.get("current_price")
|
||||||
|
price_change_pct = market_data.get("price_change_pct")
|
||||||
|
volume_ratio = market_data.get("volume_ratio")
|
||||||
|
rsi = market_data.get("rsi")
|
||||||
|
timestamp_str = market_data.get("timestamp")
|
||||||
|
market_name = market_data.get("market")
|
||||||
|
stock_code = market_data.get("stock_code", "UNKNOWN")
|
||||||
|
|
||||||
|
# Default action is HOLD with conservative confidence and rationale
|
||||||
|
action = "HOLD"
|
||||||
|
confidence = 50
|
||||||
|
rationale = f"No strong BUY signal for {stock_code} or awaiting more favorable conditions after avoiding known failure patterns."
|
||||||
|
|
||||||
|
# --- 1. Failure Pattern Avoidance Filters ---
|
||||||
|
|
||||||
|
# A. Avoid low-priced (penny) stocks
|
||||||
|
if current_price is not None and current_price < 5.0:
|
||||||
|
return {
|
||||||
|
"action": "HOLD",
|
||||||
|
"confidence": 50,
|
||||||
|
"rationale": f"AVOID {stock_code}: Stock price (${current_price:.2f}) is below minimum threshold ($5.00) for BUY action. Identified past failures on highly volatile, low-priced stocks."
|
||||||
|
}
|
||||||
|
|
||||||
|
# B. Avoid initiating BUY trades during identified high-volatility hours
|
||||||
|
if timestamp_str:
|
||||||
|
try:
|
||||||
|
trade_hour = datetime.fromisoformat(timestamp_str).hour
|
||||||
|
if trade_hour in [14, 20]:
|
||||||
|
return {
|
||||||
|
"action": "HOLD",
|
||||||
|
"confidence": 50,
|
||||||
|
"rationale": f"AVOID {stock_code}: Trading during historically volatile hour ({trade_hour} UTC) where previous BUYs resulted in losses. Prefer to observe market stability."
|
||||||
|
}
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# C. Be cautious with extreme momentum spikes
|
||||||
|
if volume_ratio is not None and price_change_pct is not None:
|
||||||
|
if volume_ratio >= 9.0 and price_change_pct >= 15.0:
|
||||||
|
return {
|
||||||
|
"action": "HOLD",
|
||||||
|
"confidence": 50,
|
||||||
|
"rationale": f"AVOID {stock_code}: Extreme short-term momentum detected (price change: +{price_change_pct:.2f}%, volume ratio: {volume_ratio:.1f}x). Historical failures indicate buying into such rapid spikes often leads to reversals."
|
||||||
|
}
|
||||||
|
|
||||||
|
# D. Be cautious with "oversold" signals without further confirmation
|
||||||
|
if rsi is not None and rsi < 30:
|
||||||
|
return {
|
||||||
|
"action": "HOLD",
|
||||||
|
"confidence": 50,
|
||||||
|
"rationale": f"AVOID {stock_code}: Oversold signal (RSI={rsi:.1f}) detected. While often a BUY signal, historical failures on similar 'oversold' trades suggest waiting for stronger confirmation."
|
||||||
|
}
|
||||||
|
|
||||||
|
# --- 2. Improved BUY Signal Generation ---
|
||||||
|
if volume_ratio is not None and 2.0 <= volume_ratio < 9.0 and \
|
||||||
|
price_change_pct is not None and 2.0 <= price_change_pct < 15.0:
|
||||||
|
|
||||||
|
action = "BUY"
|
||||||
|
confidence = 70
|
||||||
|
rationale = f"BUY {stock_code}: Moderate momentum detected (price change: +{price_change_pct:.2f}%, volume ratio: {volume_ratio:.1f}x). Passed filters for price and extreme momentum, avoiding past failure patterns."
|
||||||
|
|
||||||
|
if market_name in ["US_AMEX", "US_NASDAQ"]:
|
||||||
|
confidence = max(60, confidence - 5)
|
||||||
|
rationale += f" Adjusted confidence for {market_name} market characteristics."
|
||||||
|
elif market_name == "US_NYSE":
|
||||||
|
confidence = max(65, confidence)
|
||||||
|
|
||||||
|
confidence = max(50, min(85, confidence))
|
||||||
|
|
||||||
|
return {"action": action, "confidence": confidence, "rationale": rationale}
|
||||||
@@ -3,9 +3,11 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from datetime import UTC, datetime, timedelta
|
from datetime import UTC, datetime, timedelta
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -363,3 +365,435 @@ class TestHealthMonitor:
|
|||||||
assert "timestamp" in report
|
assert "timestamp" in report
|
||||||
assert "checks" in report
|
assert "checks" in report
|
||||||
assert len(report["checks"]) == 3
|
assert len(report["checks"]) == 3
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# BackupExporter — additional coverage for previously uncovered branches
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def empty_db(tmp_path: Path) -> Path:
|
||||||
|
"""Create a temporary database with NO trade records."""
|
||||||
|
db_path = tmp_path / "empty_trades.db"
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.execute(
|
||||||
|
"""CREATE TABLE trades (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
timestamp TEXT NOT NULL,
|
||||||
|
stock_code TEXT NOT NULL,
|
||||||
|
action TEXT NOT NULL,
|
||||||
|
quantity INTEGER NOT NULL,
|
||||||
|
price REAL NOT NULL,
|
||||||
|
confidence INTEGER NOT NULL,
|
||||||
|
rationale TEXT,
|
||||||
|
pnl REAL DEFAULT 0.0
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
return db_path
|
||||||
|
|
||||||
|
|
||||||
|
class TestBackupExporterAdditional:
|
||||||
|
"""Cover branches missed in the original TestBackupExporter suite."""
|
||||||
|
|
||||||
|
def test_export_all_default_formats(self, temp_db: Path, tmp_path: Path) -> None:
|
||||||
|
"""export_all with formats=None must default to JSON+CSV+Parquet path."""
|
||||||
|
exporter = BackupExporter(str(temp_db))
|
||||||
|
# formats=None triggers the default list assignment (line 62)
|
||||||
|
results = exporter.export_all(tmp_path / "out", formats=None, compress=False)
|
||||||
|
# JSON and CSV must always succeed; Parquet needs pyarrow
|
||||||
|
assert ExportFormat.JSON in results
|
||||||
|
assert ExportFormat.CSV in results
|
||||||
|
|
||||||
|
def test_export_all_logs_error_on_failure(
|
||||||
|
self, temp_db: Path, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""export_all must log an error and continue when one format fails."""
|
||||||
|
exporter = BackupExporter(str(temp_db))
|
||||||
|
# Patch _export_format to raise on JSON, succeed on CSV
|
||||||
|
original = exporter._export_format
|
||||||
|
|
||||||
|
def failing_export(fmt, *args, **kwargs): # type: ignore[no-untyped-def]
|
||||||
|
if fmt == ExportFormat.JSON:
|
||||||
|
raise RuntimeError("simulated failure")
|
||||||
|
return original(fmt, *args, **kwargs)
|
||||||
|
|
||||||
|
exporter._export_format = failing_export # type: ignore[method-assign]
|
||||||
|
results = exporter.export_all(
|
||||||
|
tmp_path / "out",
|
||||||
|
formats=[ExportFormat.JSON, ExportFormat.CSV],
|
||||||
|
compress=False,
|
||||||
|
)
|
||||||
|
# JSON failed → not in results; CSV succeeded → in results
|
||||||
|
assert ExportFormat.JSON not in results
|
||||||
|
assert ExportFormat.CSV in results
|
||||||
|
|
||||||
|
def test_export_csv_empty_trades_no_compress(
|
||||||
|
self, empty_db: Path, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""CSV export with no trades and compress=False must write header row only."""
|
||||||
|
exporter = BackupExporter(str(empty_db))
|
||||||
|
results = exporter.export_all(
|
||||||
|
tmp_path / "out",
|
||||||
|
formats=[ExportFormat.CSV],
|
||||||
|
compress=False,
|
||||||
|
)
|
||||||
|
assert ExportFormat.CSV in results
|
||||||
|
out = results[ExportFormat.CSV]
|
||||||
|
assert out.exists()
|
||||||
|
content = out.read_text()
|
||||||
|
assert "timestamp" in content
|
||||||
|
|
||||||
|
def test_export_csv_empty_trades_compressed(
|
||||||
|
self, empty_db: Path, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""CSV export with no trades and compress=True must write gzipped header."""
|
||||||
|
import gzip
|
||||||
|
|
||||||
|
exporter = BackupExporter(str(empty_db))
|
||||||
|
results = exporter.export_all(
|
||||||
|
tmp_path / "out",
|
||||||
|
formats=[ExportFormat.CSV],
|
||||||
|
compress=True,
|
||||||
|
)
|
||||||
|
assert ExportFormat.CSV in results
|
||||||
|
out = results[ExportFormat.CSV]
|
||||||
|
assert out.suffix == ".gz"
|
||||||
|
with gzip.open(out, "rt", encoding="utf-8") as f:
|
||||||
|
content = f.read()
|
||||||
|
assert "timestamp" in content
|
||||||
|
|
||||||
|
def test_export_csv_with_data_compressed(
|
||||||
|
self, temp_db: Path, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""CSV export with data and compress=True must write gzipped rows."""
|
||||||
|
import gzip
|
||||||
|
|
||||||
|
exporter = BackupExporter(str(temp_db))
|
||||||
|
results = exporter.export_all(
|
||||||
|
tmp_path / "out",
|
||||||
|
formats=[ExportFormat.CSV],
|
||||||
|
compress=True,
|
||||||
|
)
|
||||||
|
assert ExportFormat.CSV in results
|
||||||
|
out = results[ExportFormat.CSV]
|
||||||
|
with gzip.open(out, "rt", encoding="utf-8") as f:
|
||||||
|
lines = f.readlines()
|
||||||
|
# Header + 3 data rows
|
||||||
|
assert len(lines) == 4
|
||||||
|
|
||||||
|
def test_export_parquet_raises_import_error_without_pyarrow(
|
||||||
|
self, temp_db: Path, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""Parquet export must raise ImportError when pyarrow is not installed."""
|
||||||
|
exporter = BackupExporter(str(temp_db))
|
||||||
|
with patch.dict(sys.modules, {"pyarrow": None, "pyarrow.parquet": None}):
|
||||||
|
try:
|
||||||
|
import pyarrow # noqa: F401
|
||||||
|
pytest.skip("pyarrow is installed; cannot test ImportError path")
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
results = exporter.export_all(
|
||||||
|
tmp_path / "out",
|
||||||
|
formats=[ExportFormat.PARQUET],
|
||||||
|
compress=False,
|
||||||
|
)
|
||||||
|
# Parquet export fails gracefully; result dict should not contain it
|
||||||
|
assert ExportFormat.PARQUET not in results
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# CloudStorage — mocked boto3 tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_boto3_module():
|
||||||
|
"""Inject a fake boto3 into sys.modules for the duration of the test."""
|
||||||
|
mock = MagicMock()
|
||||||
|
with patch.dict(sys.modules, {"boto3": mock}):
|
||||||
|
yield mock
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def s3_config():
|
||||||
|
"""Minimal S3Config for tests."""
|
||||||
|
from src.backup.cloud_storage import S3Config
|
||||||
|
|
||||||
|
return S3Config(
|
||||||
|
endpoint_url="http://localhost:9000",
|
||||||
|
access_key="minioadmin",
|
||||||
|
secret_key="minioadmin",
|
||||||
|
bucket_name="test-bucket",
|
||||||
|
region="us-east-1",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCloudStorage:
|
||||||
|
"""Test CloudStorage using mocked boto3."""
|
||||||
|
|
||||||
|
def test_init_creates_s3_client(self, mock_boto3_module, s3_config) -> None:
|
||||||
|
"""CloudStorage.__init__ must call boto3.client with the correct args."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
mock_boto3_module.client.assert_called_once()
|
||||||
|
call_kwargs = mock_boto3_module.client.call_args[1]
|
||||||
|
assert call_kwargs["aws_access_key_id"] == "minioadmin"
|
||||||
|
assert call_kwargs["aws_secret_access_key"] == "minioadmin"
|
||||||
|
assert storage.config == s3_config
|
||||||
|
|
||||||
|
def test_init_raises_if_boto3_missing(self, s3_config) -> None:
|
||||||
|
"""CloudStorage.__init__ must raise ImportError when boto3 is absent."""
|
||||||
|
with patch.dict(sys.modules, {"boto3": None}): # type: ignore[dict-item]
|
||||||
|
with pytest.raises((ImportError, TypeError)):
|
||||||
|
# Re-import to trigger the try/except inside __init__
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
import src.backup.cloud_storage as m
|
||||||
|
|
||||||
|
importlib.reload(m)
|
||||||
|
m.CloudStorage(s3_config)
|
||||||
|
|
||||||
|
def test_upload_file_success(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""upload_file must call client.upload_file and return the object key."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
test_file = tmp_path / "backup.json.gz"
|
||||||
|
test_file.write_bytes(b"data")
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
key = storage.upload_file(test_file, object_key="backups/backup.json.gz")
|
||||||
|
|
||||||
|
assert key == "backups/backup.json.gz"
|
||||||
|
storage.client.upload_file.assert_called_once()
|
||||||
|
|
||||||
|
def test_upload_file_default_key(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""upload_file without object_key must use the filename as key."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
test_file = tmp_path / "myfile.gz"
|
||||||
|
test_file.write_bytes(b"data")
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
key = storage.upload_file(test_file)
|
||||||
|
|
||||||
|
assert key == "myfile.gz"
|
||||||
|
|
||||||
|
def test_upload_file_not_found(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""upload_file must raise FileNotFoundError for missing files."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
with pytest.raises(FileNotFoundError):
|
||||||
|
storage.upload_file(tmp_path / "nonexistent.gz")
|
||||||
|
|
||||||
|
def test_upload_file_propagates_client_error(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""upload_file must re-raise exceptions from the boto3 client."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
test_file = tmp_path / "backup.gz"
|
||||||
|
test_file.write_bytes(b"data")
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.upload_file.side_effect = RuntimeError("network error")
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError, match="network error"):
|
||||||
|
storage.upload_file(test_file)
|
||||||
|
|
||||||
|
def test_download_file_success(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""download_file must call client.download_file and return local path."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
dest = tmp_path / "downloads" / "backup.gz"
|
||||||
|
|
||||||
|
result = storage.download_file("backups/backup.gz", dest)
|
||||||
|
|
||||||
|
assert result == dest
|
||||||
|
storage.client.download_file.assert_called_once()
|
||||||
|
|
||||||
|
def test_download_file_propagates_error(
|
||||||
|
self, mock_boto3_module, s3_config, tmp_path: Path
|
||||||
|
) -> None:
|
||||||
|
"""download_file must re-raise exceptions from the boto3 client."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.download_file.side_effect = RuntimeError("timeout")
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError, match="timeout"):
|
||||||
|
storage.download_file("key", tmp_path / "dest.gz")
|
||||||
|
|
||||||
|
def test_list_files_returns_objects(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""list_files must return parsed file metadata from S3 response."""
|
||||||
|
from datetime import timezone
|
||||||
|
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.list_objects_v2.return_value = {
|
||||||
|
"Contents": [
|
||||||
|
{
|
||||||
|
"Key": "backups/a.gz",
|
||||||
|
"Size": 1024,
|
||||||
|
"LastModified": datetime(2026, 1, 1, tzinfo=timezone.utc),
|
||||||
|
"ETag": '"abc123"',
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
files = storage.list_files(prefix="backups/")
|
||||||
|
assert len(files) == 1
|
||||||
|
assert files[0]["key"] == "backups/a.gz"
|
||||||
|
assert files[0]["size_bytes"] == 1024
|
||||||
|
|
||||||
|
def test_list_files_empty_bucket(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""list_files must return empty list when bucket has no objects."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.list_objects_v2.return_value = {}
|
||||||
|
|
||||||
|
files = storage.list_files()
|
||||||
|
assert files == []
|
||||||
|
|
||||||
|
def test_list_files_propagates_error(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""list_files must re-raise exceptions from the boto3 client."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.list_objects_v2.side_effect = RuntimeError("auth error")
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
storage.list_files()
|
||||||
|
|
||||||
|
def test_delete_file_success(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""delete_file must call client.delete_object with the correct key."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.delete_file("backups/old.gz")
|
||||||
|
storage.client.delete_object.assert_called_once_with(
|
||||||
|
Bucket="test-bucket", Key="backups/old.gz"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_delete_file_propagates_error(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""delete_file must re-raise exceptions from the boto3 client."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.delete_object.side_effect = RuntimeError("permission denied")
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
storage.delete_file("backups/old.gz")
|
||||||
|
|
||||||
|
def test_get_storage_stats_success(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""get_storage_stats must aggregate file sizes correctly."""
|
||||||
|
from datetime import timezone
|
||||||
|
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.list_objects_v2.return_value = {
|
||||||
|
"Contents": [
|
||||||
|
{
|
||||||
|
"Key": "a.gz",
|
||||||
|
"Size": 1024 * 1024,
|
||||||
|
"LastModified": datetime(2026, 1, 1, tzinfo=timezone.utc),
|
||||||
|
"ETag": '"x"',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Key": "b.gz",
|
||||||
|
"Size": 1024 * 1024,
|
||||||
|
"LastModified": datetime(2026, 1, 2, tzinfo=timezone.utc),
|
||||||
|
"ETag": '"y"',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
stats = storage.get_storage_stats()
|
||||||
|
assert stats["total_files"] == 2
|
||||||
|
assert stats["total_size_bytes"] == 2 * 1024 * 1024
|
||||||
|
assert stats["total_size_mb"] == pytest.approx(2.0)
|
||||||
|
|
||||||
|
def test_get_storage_stats_on_error(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""get_storage_stats must return error dict without raising on failure."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.list_objects_v2.side_effect = RuntimeError("no connection")
|
||||||
|
|
||||||
|
stats = storage.get_storage_stats()
|
||||||
|
assert "error" in stats
|
||||||
|
assert stats["total_files"] == 0
|
||||||
|
|
||||||
|
def test_verify_connection_success(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""verify_connection must return True when head_bucket succeeds."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
result = storage.verify_connection()
|
||||||
|
assert result is True
|
||||||
|
|
||||||
|
def test_verify_connection_failure(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""verify_connection must return False when head_bucket raises."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.head_bucket.side_effect = RuntimeError("no such bucket")
|
||||||
|
|
||||||
|
result = storage.verify_connection()
|
||||||
|
assert result is False
|
||||||
|
|
||||||
|
def test_enable_versioning(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""enable_versioning must call put_bucket_versioning."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.enable_versioning()
|
||||||
|
storage.client.put_bucket_versioning.assert_called_once()
|
||||||
|
|
||||||
|
def test_enable_versioning_propagates_error(
|
||||||
|
self, mock_boto3_module, s3_config
|
||||||
|
) -> None:
|
||||||
|
"""enable_versioning must re-raise exceptions from the boto3 client."""
|
||||||
|
from src.backup.cloud_storage import CloudStorage
|
||||||
|
|
||||||
|
storage = CloudStorage(s3_config)
|
||||||
|
storage.client.put_bucket_versioning.side_effect = RuntimeError("denied")
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError):
|
||||||
|
storage.enable_versioning()
|
||||||
|
|||||||
@@ -572,4 +572,156 @@ class TestSendOrderTickRounding:
|
|||||||
order_call = mock_post.call_args_list[1]
|
order_call = mock_post.call_args_list[1]
|
||||||
body = order_call[1].get("json", {})
|
body = order_call[1].get("json", {})
|
||||||
assert body["ORD_DVSN"] == "01"
|
assert body["ORD_DVSN"] == "01"
|
||||||
assert body["ORD_UNPR"] == "0"
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TR_ID live/paper branching (issues #201, #202, #203)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestTRIDBranchingDomestic:
|
||||||
|
"""get_balance and send_order must use correct TR_ID for live vs paper mode."""
|
||||||
|
|
||||||
|
def _make_broker(self, settings, mode: str) -> KISBroker:
|
||||||
|
from src.config import Settings
|
||||||
|
|
||||||
|
s = Settings(
|
||||||
|
KIS_APP_KEY=settings.KIS_APP_KEY,
|
||||||
|
KIS_APP_SECRET=settings.KIS_APP_SECRET,
|
||||||
|
KIS_ACCOUNT_NO=settings.KIS_ACCOUNT_NO,
|
||||||
|
GEMINI_API_KEY=settings.GEMINI_API_KEY,
|
||||||
|
DB_PATH=":memory:",
|
||||||
|
ENABLED_MARKETS="KR",
|
||||||
|
MODE=mode,
|
||||||
|
)
|
||||||
|
b = KISBroker(s)
|
||||||
|
b._access_token = "tok"
|
||||||
|
b._token_expires_at = float("inf")
|
||||||
|
b._rate_limiter.acquire = AsyncMock()
|
||||||
|
return b
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_balance_paper_uses_vttc8434r(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "paper")
|
||||||
|
mock_resp = AsyncMock()
|
||||||
|
mock_resp.status = 200
|
||||||
|
mock_resp.json = AsyncMock(
|
||||||
|
return_value={"output1": [], "output2": {}}
|
||||||
|
)
|
||||||
|
mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
|
||||||
|
mock_resp.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch("aiohttp.ClientSession.get", return_value=mock_resp) as mock_get:
|
||||||
|
await broker.get_balance()
|
||||||
|
|
||||||
|
headers = mock_get.call_args[1].get("headers", {})
|
||||||
|
assert headers["tr_id"] == "VTTC8434R"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_balance_live_uses_tttc8434r(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "live")
|
||||||
|
mock_resp = AsyncMock()
|
||||||
|
mock_resp.status = 200
|
||||||
|
mock_resp.json = AsyncMock(
|
||||||
|
return_value={"output1": [], "output2": {}}
|
||||||
|
)
|
||||||
|
mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
|
||||||
|
mock_resp.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch("aiohttp.ClientSession.get", return_value=mock_resp) as mock_get:
|
||||||
|
await broker.get_balance()
|
||||||
|
|
||||||
|
headers = mock_get.call_args[1].get("headers", {})
|
||||||
|
assert headers["tr_id"] == "TTTC8434R"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_send_order_buy_paper_uses_vttc0012u(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "paper")
|
||||||
|
mock_hash = AsyncMock()
|
||||||
|
mock_hash.status = 200
|
||||||
|
mock_hash.json = AsyncMock(return_value={"HASH": "h"})
|
||||||
|
mock_hash.__aenter__ = AsyncMock(return_value=mock_hash)
|
||||||
|
mock_hash.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
mock_order = AsyncMock()
|
||||||
|
mock_order.status = 200
|
||||||
|
mock_order.json = AsyncMock(return_value={"rt_cd": "0"})
|
||||||
|
mock_order.__aenter__ = AsyncMock(return_value=mock_order)
|
||||||
|
mock_order.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"aiohttp.ClientSession.post", side_effect=[mock_hash, mock_order]
|
||||||
|
) as mock_post:
|
||||||
|
await broker.send_order("005930", "BUY", 1)
|
||||||
|
|
||||||
|
order_headers = mock_post.call_args_list[1][1].get("headers", {})
|
||||||
|
assert order_headers["tr_id"] == "VTTC0012U"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_send_order_buy_live_uses_tttc0012u(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "live")
|
||||||
|
mock_hash = AsyncMock()
|
||||||
|
mock_hash.status = 200
|
||||||
|
mock_hash.json = AsyncMock(return_value={"HASH": "h"})
|
||||||
|
mock_hash.__aenter__ = AsyncMock(return_value=mock_hash)
|
||||||
|
mock_hash.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
mock_order = AsyncMock()
|
||||||
|
mock_order.status = 200
|
||||||
|
mock_order.json = AsyncMock(return_value={"rt_cd": "0"})
|
||||||
|
mock_order.__aenter__ = AsyncMock(return_value=mock_order)
|
||||||
|
mock_order.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"aiohttp.ClientSession.post", side_effect=[mock_hash, mock_order]
|
||||||
|
) as mock_post:
|
||||||
|
await broker.send_order("005930", "BUY", 1)
|
||||||
|
|
||||||
|
order_headers = mock_post.call_args_list[1][1].get("headers", {})
|
||||||
|
assert order_headers["tr_id"] == "TTTC0012U"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_send_order_sell_paper_uses_vttc0011u(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "paper")
|
||||||
|
mock_hash = AsyncMock()
|
||||||
|
mock_hash.status = 200
|
||||||
|
mock_hash.json = AsyncMock(return_value={"HASH": "h"})
|
||||||
|
mock_hash.__aenter__ = AsyncMock(return_value=mock_hash)
|
||||||
|
mock_hash.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
mock_order = AsyncMock()
|
||||||
|
mock_order.status = 200
|
||||||
|
mock_order.json = AsyncMock(return_value={"rt_cd": "0"})
|
||||||
|
mock_order.__aenter__ = AsyncMock(return_value=mock_order)
|
||||||
|
mock_order.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"aiohttp.ClientSession.post", side_effect=[mock_hash, mock_order]
|
||||||
|
) as mock_post:
|
||||||
|
await broker.send_order("005930", "SELL", 1)
|
||||||
|
|
||||||
|
order_headers = mock_post.call_args_list[1][1].get("headers", {})
|
||||||
|
assert order_headers["tr_id"] == "VTTC0011U"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_send_order_sell_live_uses_tttc0011u(self, settings) -> None:
|
||||||
|
broker = self._make_broker(settings, "live")
|
||||||
|
mock_hash = AsyncMock()
|
||||||
|
mock_hash.status = 200
|
||||||
|
mock_hash.json = AsyncMock(return_value={"HASH": "h"})
|
||||||
|
mock_hash.__aenter__ = AsyncMock(return_value=mock_hash)
|
||||||
|
mock_hash.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
mock_order = AsyncMock()
|
||||||
|
mock_order.status = 200
|
||||||
|
mock_order.json = AsyncMock(return_value={"rt_cd": "0"})
|
||||||
|
mock_order.__aenter__ = AsyncMock(return_value=mock_order)
|
||||||
|
mock_order.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"aiohttp.ClientSession.post", side_effect=[mock_hash, mock_order]
|
||||||
|
) as mock_post:
|
||||||
|
await broker.send_order("005930", "SELL", 1)
|
||||||
|
|
||||||
|
order_headers = mock_post.call_args_list[1][1].get("headers", {})
|
||||||
|
assert order_headers["tr_id"] == "TTTC0011U"
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import pytest
|
|||||||
from src.context.aggregator import ContextAggregator
|
from src.context.aggregator import ContextAggregator
|
||||||
from src.context.layer import LAYER_CONFIG, ContextLayer
|
from src.context.layer import LAYER_CONFIG, ContextLayer
|
||||||
from src.context.store import ContextStore
|
from src.context.store import ContextStore
|
||||||
|
from src.context.summarizer import ContextSummarizer
|
||||||
from src.db import init_db, log_trade
|
from src.db import init_db, log_trade
|
||||||
|
|
||||||
|
|
||||||
@@ -370,3 +371,259 @@ class TestLayerMetadata:
|
|||||||
|
|
||||||
# L1 aggregates from L2
|
# L1 aggregates from L2
|
||||||
assert LAYER_CONFIG[ContextLayer.L1_LEGACY].aggregation_source == ContextLayer.L2_ANNUAL
|
assert LAYER_CONFIG[ContextLayer.L1_LEGACY].aggregation_source == ContextLayer.L2_ANNUAL
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# ContextSummarizer tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def summarizer(db_conn: sqlite3.Connection) -> ContextSummarizer:
|
||||||
|
"""Provide a ContextSummarizer backed by an in-memory store."""
|
||||||
|
return ContextSummarizer(ContextStore(db_conn))
|
||||||
|
|
||||||
|
|
||||||
|
class TestContextSummarizer:
|
||||||
|
"""Test suite for ContextSummarizer."""
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# summarize_numeric_values
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_summarize_empty_values(self, summarizer: ContextSummarizer) -> None:
|
||||||
|
"""Empty list must return SummaryStats with count=0 and no other fields."""
|
||||||
|
stats = summarizer.summarize_numeric_values([])
|
||||||
|
assert stats.count == 0
|
||||||
|
assert stats.mean is None
|
||||||
|
assert stats.min is None
|
||||||
|
assert stats.max is None
|
||||||
|
|
||||||
|
def test_summarize_single_value(self, summarizer: ContextSummarizer) -> None:
|
||||||
|
"""Single-element list must return correct stats with std=0 and trend=flat."""
|
||||||
|
stats = summarizer.summarize_numeric_values([42.0])
|
||||||
|
assert stats.count == 1
|
||||||
|
assert stats.mean == 42.0
|
||||||
|
assert stats.std == 0.0
|
||||||
|
assert stats.trend == "flat"
|
||||||
|
|
||||||
|
def test_summarize_upward_trend(self, summarizer: ContextSummarizer) -> None:
|
||||||
|
"""Increasing values must produce trend='up'."""
|
||||||
|
values = [1.0, 2.0, 3.0, 10.0, 20.0, 30.0]
|
||||||
|
stats = summarizer.summarize_numeric_values(values)
|
||||||
|
assert stats.trend == "up"
|
||||||
|
|
||||||
|
def test_summarize_downward_trend(self, summarizer: ContextSummarizer) -> None:
|
||||||
|
"""Decreasing values must produce trend='down'."""
|
||||||
|
values = [30.0, 20.0, 10.0, 3.0, 2.0, 1.0]
|
||||||
|
stats = summarizer.summarize_numeric_values(values)
|
||||||
|
assert stats.trend == "down"
|
||||||
|
|
||||||
|
def test_summarize_flat_trend(self, summarizer: ContextSummarizer) -> None:
|
||||||
|
"""Stable values must produce trend='flat'."""
|
||||||
|
values = [100.0, 100.1, 99.9, 100.0, 100.2, 99.8]
|
||||||
|
stats = summarizer.summarize_numeric_values(values)
|
||||||
|
assert stats.trend == "flat"
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# summarize_layer
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_summarize_layer_no_data(
|
||||||
|
self, summarizer: ContextSummarizer
|
||||||
|
) -> None:
|
||||||
|
"""summarize_layer with no data must return the 'No data' sentinel."""
|
||||||
|
result = summarizer.summarize_layer(ContextLayer.L6_DAILY)
|
||||||
|
assert result["count"] == 0
|
||||||
|
assert "No data" in result["summary"]
|
||||||
|
|
||||||
|
def test_summarize_layer_numeric(
|
||||||
|
self, summarizer: ContextSummarizer, db_conn: sqlite3.Connection
|
||||||
|
) -> None:
|
||||||
|
"""summarize_layer must collect numeric values and produce stats."""
|
||||||
|
store = summarizer.store
|
||||||
|
store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "total_pnl", 100.0)
|
||||||
|
store.set_context(ContextLayer.L6_DAILY, "2026-02-02", "total_pnl", 200.0)
|
||||||
|
|
||||||
|
result = summarizer.summarize_layer(ContextLayer.L6_DAILY)
|
||||||
|
assert "total_entries" in result
|
||||||
|
|
||||||
|
def test_summarize_layer_with_dict_values(
|
||||||
|
self, summarizer: ContextSummarizer
|
||||||
|
) -> None:
|
||||||
|
"""summarize_layer must handle dict values by extracting numeric subkeys."""
|
||||||
|
store = summarizer.store
|
||||||
|
# set_context serialises the value as JSON, so passing a dict works
|
||||||
|
store.set_context(
|
||||||
|
ContextLayer.L6_DAILY, "2026-02-01", "metrics",
|
||||||
|
{"win_rate": 65.0, "label": "good"}
|
||||||
|
)
|
||||||
|
|
||||||
|
result = summarizer.summarize_layer(ContextLayer.L6_DAILY)
|
||||||
|
assert "total_entries" in result
|
||||||
|
# numeric subkey "win_rate" should appear as "metrics.win_rate"
|
||||||
|
assert "metrics.win_rate" in result
|
||||||
|
|
||||||
|
    def test_summarize_layer_with_string_values(
        self, summarizer: ContextSummarizer
    ) -> None:
        """summarize_layer must count string values separately."""
        store = summarizer.store
        # set_context stores string values as JSON-encoded strings
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "outlook", "BULLISH")

        result = summarizer.summarize_layer(ContextLayer.L6_DAILY)
        # String fields contribute a `<key>_count` entry
        assert "outlook_count" in result

    # ------------------------------------------------------------------
    # rolling_window_summary
    # ------------------------------------------------------------------

    def test_rolling_window_summary_basic(
        self, summarizer: ContextSummarizer
    ) -> None:
        """rolling_window_summary must return the expected structure."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "pnl", 500.0)

        result = summarizer.rolling_window_summary(ContextLayer.L6_DAILY)
        assert "window_days" in result
        assert "recent_data" in result
        assert "historical_summary" in result

    def test_rolling_window_summary_no_older_data(
        self, summarizer: ContextSummarizer
    ) -> None:
        """rolling_window_summary with summarize_older=False skips history."""
        result = summarizer.rolling_window_summary(
            ContextLayer.L6_DAILY, summarize_older=False
        )
        assert result["historical_summary"] == {}

    # ------------------------------------------------------------------
    # aggregate_to_higher_layer
    # ------------------------------------------------------------------

    def test_aggregate_to_higher_layer_mean(
        self, summarizer: ContextSummarizer
    ) -> None:
        """aggregate_to_higher_layer with 'mean' via dict subkeys returns average."""
        store = summarizer.store
        # Use different outer keys but same inner metric key so get_all_contexts
        # returns multiple rows with the target subkey.
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day1", {"pnl": 100.0})
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day2", {"pnl": 200.0})

        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "pnl", "mean"
        )
        assert result == pytest.approx(150.0)

    def test_aggregate_to_higher_layer_sum(
        self, summarizer: ContextSummarizer
    ) -> None:
        """aggregate_to_higher_layer with 'sum' must return the total."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day1", {"pnl": 100.0})
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day2", {"pnl": 200.0})

        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "pnl", "sum"
        )
        assert result == pytest.approx(300.0)

    def test_aggregate_to_higher_layer_max(
        self, summarizer: ContextSummarizer
    ) -> None:
        """aggregate_to_higher_layer with 'max' must return the maximum."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day1", {"pnl": 100.0})
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day2", {"pnl": 200.0})

        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "pnl", "max"
        )
        assert result == pytest.approx(200.0)

    def test_aggregate_to_higher_layer_min(
        self, summarizer: ContextSummarizer
    ) -> None:
        """aggregate_to_higher_layer with 'min' must return the minimum."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day1", {"pnl": 100.0})
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day2", {"pnl": 200.0})

        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "pnl", "min"
        )
        assert result == pytest.approx(100.0)

    def test_aggregate_to_higher_layer_no_data(
        self, summarizer: ContextSummarizer
    ) -> None:
        """aggregate_to_higher_layer with no matching key must return None."""
        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "nonexistent", "mean"
        )
        assert result is None

    def test_aggregate_to_higher_layer_unknown_func_defaults_to_mean(
        self, summarizer: ContextSummarizer
    ) -> None:
        """Unknown aggregation function must fall back to mean."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day1", {"pnl": 100.0})
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "day2", {"pnl": 200.0})

        result = summarizer.aggregate_to_higher_layer(
            ContextLayer.L6_DAILY, ContextLayer.L5_WEEKLY, "pnl", "unknown_func"
        )
        assert result == pytest.approx(150.0)

    # ------------------------------------------------------------------
    # create_compact_summary + format_summary_for_prompt
    # ------------------------------------------------------------------

    def test_create_compact_summary(
        self, summarizer: ContextSummarizer
    ) -> None:
        """create_compact_summary must produce a dict keyed by layer value."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "pnl", 100.0)

        result = summarizer.create_compact_summary([ContextLayer.L6_DAILY])
        assert ContextLayer.L6_DAILY.value in result

    def test_format_summary_for_prompt_with_numeric_metrics(
        self, summarizer: ContextSummarizer
    ) -> None:
        """format_summary_for_prompt must render avg/trend fields."""
        store = summarizer.store
        store.set_context(ContextLayer.L6_DAILY, "2026-02-01", "pnl", 100.0)
        store.set_context(ContextLayer.L6_DAILY, "2026-02-02", "pnl", 200.0)

        compact = summarizer.create_compact_summary([ContextLayer.L6_DAILY])
        text = summarizer.format_summary_for_prompt(compact)
        assert isinstance(text, str)

    def test_format_summary_for_prompt_skips_empty_layers(
        self, summarizer: ContextSummarizer
    ) -> None:
        """format_summary_for_prompt must skip layers with no metrics."""
        summary = {ContextLayer.L6_DAILY.value: {}}
        text = summarizer.format_summary_for_prompt(summary)
        assert text == ""

    def test_format_summary_non_dict_value(
        self, summarizer: ContextSummarizer
    ) -> None:
        """format_summary_for_prompt must render non-dict values as plain text."""
        summary = {
            "daily": {
                "plain_count": 42,
            }
        }
        text = summarizer.format_summary_for_prompt(summary)
        assert "plain_count" in text
        assert "42" in text
@@ -351,3 +351,65 @@ def test_positions_empty_when_no_trades(tmp_path: Path) -> None:
    body = get_positions()
    assert body["count"] == 0
    assert body["positions"] == []


def _seed_cb_context(conn: sqlite3.Connection, pnl_pct: float, market: str = "KR") -> None:
    import json as _json
    conn.execute(
        "INSERT OR REPLACE INTO system_metrics (key, value, updated_at) VALUES (?, ?, ?)",
        (
            f"portfolio_pnl_pct_{market}",
            _json.dumps({"pnl_pct": pnl_pct}),
            "2026-02-22T10:00:00+00:00",
        ),
    )
    conn.commit()


def test_status_circuit_breaker_ok(tmp_path: Path) -> None:
    """Must return status=ok when pnl_pct is above -2.0%."""
    db_path = tmp_path / "cb_ok.db"
    conn = init_db(str(db_path))
    _seed_cb_context(conn, -1.0)
    conn.close()
    app = create_dashboard_app(str(db_path))
    get_status = _endpoint(app, "/api/status")
    body = get_status()
    cb = body["circuit_breaker"]
    assert cb["status"] == "ok"
    assert cb["current_pnl_pct"] == -1.0
    assert cb["threshold_pct"] == -3.0


def test_status_circuit_breaker_warning(tmp_path: Path) -> None:
    """Must return status=warning when pnl_pct is at or below -2.0%."""
    db_path = tmp_path / "cb_warn.db"
    conn = init_db(str(db_path))
    _seed_cb_context(conn, -2.5)
    conn.close()
    app = create_dashboard_app(str(db_path))
    get_status = _endpoint(app, "/api/status")
    body = get_status()
    assert body["circuit_breaker"]["status"] == "warning"


def test_status_circuit_breaker_tripped(tmp_path: Path) -> None:
    """Must return status=tripped when pnl_pct is at or below the threshold (-3.0%)."""
    db_path = tmp_path / "cb_tripped.db"
    conn = init_db(str(db_path))
    _seed_cb_context(conn, -3.5)
    conn.close()
    app = create_dashboard_app(str(db_path))
    get_status = _endpoint(app, "/api/status")
    body = get_status()
    assert body["circuit_breaker"]["status"] == "tripped"


def test_status_circuit_breaker_unknown_when_no_data(tmp_path: Path) -> None:
    """Must return status=unknown when the L7 context has no pnl_pct data."""
    app = _app(tmp_path)  # seed_db contains no portfolio_pnl_pct entry
    get_status = _endpoint(app, "/api/status")
    body = get_status()
    cb = body["circuit_breaker"]
    assert cb["status"] == "unknown"
    assert cb["current_pnl_pct"] is None
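The four tests above pin the dashboard thresholds: ok above -2.0%, warning between -2.0% and -3.0%, tripped at or below -3.0%, unknown when no data is present. A minimal classifier consistent with those assertions might look like the sketch below; the function name and its placement are assumptions, not the project's actual dashboard code.

def classify_circuit_breaker(pnl_pct: float | None, threshold_pct: float = -3.0) -> str:
    """Hypothetical helper: map a portfolio P&L percentage to a CB status string."""
    if pnl_pct is None:
        return "unknown"   # no pnl_pct data in the L7 context
    if pnl_pct <= threshold_pct:
        return "tripped"   # at or below -3.0%: trading halted
    if pnl_pct <= -2.0:
        return "warning"   # between -3.0% and -2.0%: approaching the breaker
    return "ok"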
135
tests/test_db.py
@@ -1,5 +1,8 @@
"""Tests for database helper functions."""

import tempfile
import os

from src.db import get_open_position, init_db, log_trade


@@ -58,3 +61,135 @@ def test_get_open_position_returns_none_when_latest_is_sell() -> None:
def test_get_open_position_returns_none_when_no_trades() -> None:
    conn = init_db(":memory:")
    assert get_open_position(conn, "AAPL", "US_NASDAQ") is None


# ---------------------------------------------------------------------------
# WAL mode tests (issue #210)
# ---------------------------------------------------------------------------


def test_wal_mode_applied_to_file_db() -> None:
    """File-based DB must use WAL journal mode for dashboard concurrent reads."""
    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
        db_path = f.name
    try:
        conn = init_db(db_path)
        cursor = conn.execute("PRAGMA journal_mode")
        mode = cursor.fetchone()[0]
        assert mode == "wal", f"Expected WAL mode, got {mode}"
        conn.close()
    finally:
        os.unlink(db_path)
        # Clean up WAL auxiliary files if they exist
        for ext in ("-wal", "-shm"):
            path = db_path + ext
            if os.path.exists(path):
                os.unlink(path)


def test_wal_mode_not_applied_to_memory_db() -> None:
    """:memory: DB must not apply WAL (SQLite does not support WAL for in-memory)."""
    conn = init_db(":memory:")
    cursor = conn.execute("PRAGMA journal_mode")
    mode = cursor.fetchone()[0]
    # In-memory DBs default to 'memory' journal mode
    assert mode != "wal", "WAL should not be set on in-memory database"
    conn.close()


# ---------------------------------------------------------------------------
# mode column tests (issue #212)
# ---------------------------------------------------------------------------


def test_log_trade_stores_mode_paper() -> None:
    """log_trade must persist mode='paper' in the trades table."""
    conn = init_db(":memory:")
    log_trade(
        conn=conn,
        stock_code="005930",
        action="BUY",
        confidence=85,
        rationale="test",
        mode="paper",
    )
    row = conn.execute("SELECT mode FROM trades ORDER BY id DESC LIMIT 1").fetchone()
    assert row is not None
    assert row[0] == "paper"


def test_log_trade_stores_mode_live() -> None:
    """log_trade must persist mode='live' in the trades table."""
    conn = init_db(":memory:")
    log_trade(
        conn=conn,
        stock_code="005930",
        action="BUY",
        confidence=85,
        rationale="test",
        mode="live",
    )
    row = conn.execute("SELECT mode FROM trades ORDER BY id DESC LIMIT 1").fetchone()
    assert row is not None
    assert row[0] == "live"


def test_log_trade_default_mode_is_paper() -> None:
    """log_trade without explicit mode must default to 'paper'."""
    conn = init_db(":memory:")
    log_trade(
        conn=conn,
        stock_code="005930",
        action="HOLD",
        confidence=50,
        rationale="test",
    )
    row = conn.execute("SELECT mode FROM trades ORDER BY id DESC LIMIT 1").fetchone()
    assert row is not None
    assert row[0] == "paper"


def test_mode_column_exists_in_schema() -> None:
    """trades table must have a mode column after init_db."""
    conn = init_db(":memory:")
    cursor = conn.execute("PRAGMA table_info(trades)")
    columns = {row[1] for row in cursor.fetchall()}
    assert "mode" in columns


def test_mode_migration_adds_column_to_existing_db() -> None:
    """init_db must add mode column to existing DBs that lack it (migration)."""
    import sqlite3

    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
        db_path = f.name
    try:
        # Create DB without mode column (simulate old schema)
        old_conn = sqlite3.connect(db_path)
        old_conn.execute(
            """CREATE TABLE trades (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                timestamp TEXT NOT NULL,
                stock_code TEXT NOT NULL,
                action TEXT NOT NULL,
                confidence INTEGER NOT NULL,
                rationale TEXT,
                quantity INTEGER,
                price REAL,
                pnl REAL DEFAULT 0.0,
                market TEXT DEFAULT 'KR',
                exchange_code TEXT DEFAULT 'KRX',
                decision_id TEXT
            )"""
        )
        old_conn.commit()
        old_conn.close()

        # Run init_db — should add mode column via migration
        conn = init_db(db_path)
        cursor = conn.execute("PRAGMA table_info(trades)")
        columns = {row[1] for row in cursor.fetchall()}
        assert "mode" in columns
        conn.close()
    finally:
        os.unlink(db_path)
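For orientation, the behaviour these tests expect (WAL only for file-backed databases, plus an additive mode-column migration) can be satisfied by an init_db along the following lines. This is a sketch written against the tests above, not the actual src.db implementation.

import sqlite3

def init_db(db_path: str) -> sqlite3.Connection:
    conn = sqlite3.connect(db_path)
    # WAL improves concurrent reads (dashboard), but SQLite does not support it for :memory:
    if db_path != ":memory:":
        conn.execute("PRAGMA journal_mode=WAL")
    conn.execute(
        """CREATE TABLE IF NOT EXISTS trades (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT NOT NULL,
            stock_code TEXT NOT NULL,
            action TEXT NOT NULL,
            confidence INTEGER NOT NULL,
            rationale TEXT,
            quantity INTEGER,
            price REAL,
            pnl REAL DEFAULT 0.0,
            market TEXT DEFAULT 'KR',
            exchange_code TEXT DEFAULT 'KRX',
            decision_id TEXT,
            mode TEXT DEFAULT 'paper'
        )"""
    )
    # Migration: add the mode column to databases created before issue #212
    columns = {row[1] for row in conn.execute("PRAGMA table_info(trades)")}
    if "mode" not in columns:
        conn.execute("ALTER TABLE trades ADD COLUMN mode TEXT DEFAULT 'paper'")
    conn.commit()
    return conn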
117
tests/test_logging_config.py
Normal file
@@ -0,0 +1,117 @@
"""Tests for JSON structured logging configuration."""

from __future__ import annotations

import json
import logging
import sys

from src.logging_config import JSONFormatter, setup_logging


class TestJSONFormatter:
    """Test JSONFormatter output."""

    def test_basic_log_record(self) -> None:
        """JSONFormatter must emit valid JSON with required fields."""
        formatter = JSONFormatter()
        record = logging.LogRecord(
            name="test.logger",
            level=logging.INFO,
            pathname="",
            lineno=0,
            msg="Hello %s",
            args=("world",),
            exc_info=None,
        )
        output = formatter.format(record)
        data = json.loads(output)
        assert data["level"] == "INFO"
        assert data["logger"] == "test.logger"
        assert data["message"] == "Hello world"
        assert "timestamp" in data

    def test_includes_exception_info(self) -> None:
        """JSONFormatter must include exception info when present."""
        formatter = JSONFormatter()
        try:
            raise ValueError("test error")
        except ValueError:
            exc_info = sys.exc_info()
        record = logging.LogRecord(
            name="test",
            level=logging.ERROR,
            pathname="",
            lineno=0,
            msg="oops",
            args=(),
            exc_info=exc_info,
        )
        output = formatter.format(record)
        data = json.loads(output)
        assert "exception" in data
        assert "ValueError" in data["exception"]

    def test_extra_trading_fields_included(self) -> None:
        """Extra trading fields attached to the record must appear in JSON."""
        formatter = JSONFormatter()
        record = logging.LogRecord(
            name="test",
            level=logging.INFO,
            pathname="",
            lineno=0,
            msg="trade",
            args=(),
            exc_info=None,
        )
        record.stock_code = "005930"  # type: ignore[attr-defined]
        record.action = "BUY"  # type: ignore[attr-defined]
        record.confidence = 85  # type: ignore[attr-defined]
        record.pnl_pct = -1.5  # type: ignore[attr-defined]
        record.order_amount = 1_000_000  # type: ignore[attr-defined]
        output = formatter.format(record)
        data = json.loads(output)
        assert data["stock_code"] == "005930"
        assert data["action"] == "BUY"
        assert data["confidence"] == 85
        assert data["pnl_pct"] == -1.5
        assert data["order_amount"] == 1_000_000

    def test_none_extra_fields_excluded(self) -> None:
        """Extra fields that are None must not appear in JSON output."""
        formatter = JSONFormatter()
        record = logging.LogRecord(
            name="test",
            level=logging.INFO,
            pathname="",
            lineno=0,
            msg="no extras",
            args=(),
            exc_info=None,
        )
        output = formatter.format(record)
        data = json.loads(output)
        assert "stock_code" not in data
        assert "action" not in data
        assert "confidence" not in data


class TestSetupLogging:
    """Test setup_logging function."""

    def test_configures_root_logger(self) -> None:
        """setup_logging must attach a JSON handler to the root logger."""
        setup_logging(level=logging.DEBUG)
        root = logging.getLogger()
        json_handlers = [
            h for h in root.handlers if isinstance(h.formatter, JSONFormatter)
        ]
        assert len(json_handlers) == 1
        assert root.level == logging.DEBUG

    def test_avoids_duplicate_handlers(self) -> None:
        """Calling setup_logging twice must not add duplicate handlers."""
        setup_logging()
        setup_logging()
        root = logging.getLogger()
        assert len(root.handlers) == 1
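These tests fully constrain the formatter's observable behaviour: JSON output with timestamp/level/logger/message, an "exception" field when exc_info is set, pass-through of a fixed set of trading extras, and exclusion of extras that are None. A minimal JSONFormatter that would satisfy them could look like the sketch below (an illustration, not necessarily the shipped src.logging_config).

import json
import logging
from datetime import datetime, timezone

_EXTRA_FIELDS = ("stock_code", "action", "confidence", "pnl_pct", "order_amount")

class JSONFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        data = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }
        if record.exc_info:
            data["exception"] = self.formatException(record.exc_info)
        for field in _EXTRA_FIELDS:
            value = getattr(record, field, None)
            if value is not None:  # None extras are excluded from the output
                data[field] = value
        return json.dumps(data)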
@@ -18,10 +18,13 @@ from src.main import (
    _extract_held_codes_from_balance,
    _extract_held_qty_from_balance,
    _handle_market_close,
    _retry_connection,
    _run_context_scheduler,
    _run_evolution_loop,
    _start_dashboard_server,
    run_daily_session,
    safe_float,
    sync_positions_from_broker,
    trading_cycle,
)
from src.strategy.models import (
@@ -1102,10 +1105,11 @@ class TestOverseasBalanceParsing:
        mock_telegram: MagicMock,
        mock_overseas_market: MagicMock,
    ) -> None:
-        """Overseas BUY order must use current_price (limit), not 0 (market).
+        """Overseas BUY order must use current_price +0.2% limit, not market order.

-        KIS VTS rejects market orders for overseas paper trading.
-        Regression test for issue #149.
+        KIS market orders (ORD_DVSN=01) calculate quantity against the upper limit
+        price (the daily price ceiling), so only 60-80% of the intended cash is used.
+        Regression test for issue #149 / #211.
        """
        mock_telegram.notify_trade_execution = AsyncMock()

@@ -1126,14 +1130,93 @@ class TestOverseasBalanceParsing:
            scan_candidates={},
        )

-        # Verify limit order was sent with actual price + 0.5% premium (issue #151), not 0.0
+        # Verify BUY limit order uses +0.2% premium (issue #211)
        mock_overseas_broker_with_buy_scenario.send_overseas_order.assert_called_once()
        call_kwargs = mock_overseas_broker_with_buy_scenario.send_overseas_order.call_args
        sent_price = call_kwargs[1].get("price") or call_kwargs[0][4]
-        expected_price = round(182.5 * 1.005, 4)  # 0.5% premium for BUY limit orders
+        expected_price = round(182.5 * 1.002, 4)  # 0.2% premium for BUY limit orders
        assert sent_price == expected_price, (
-            f"Expected limit price {expected_price} (182.5 * 1.005) but got {sent_price}. "
-            "KIS VTS only accepts limit orders; BUY uses 0.5% premium to improve fill rate."
+            f"Expected limit price {expected_price} (182.5 * 1.002) but got {sent_price}. "
+            "BUY uses +0.2% to improve fill rate while minimising overpayment (#211)."
        )

    @pytest.mark.asyncio
    async def test_overseas_sell_order_uses_limit_price_below_current(
        self,
        mock_domestic_broker: MagicMock,
        mock_playbook: DayPlaybook,
        mock_risk: MagicMock,
        mock_db: MagicMock,
        mock_decision_logger: MagicMock,
        mock_context_store: MagicMock,
        mock_criticality_assessor: MagicMock,
        mock_telegram: MagicMock,
        mock_overseas_market: MagicMock,
    ) -> None:
        """Overseas SELL order must use current_price -0.2% limit (#211).

        Placing SELL at exact last price risks no-fill when the bid is just below.
        Using -0.2% ensures the order fills even if the price dips slightly.
        """
        sell_price = 182.5

        # Broker mock: returns price data and a balance with 5 AAPL shares held.
        overseas_broker = MagicMock()
        overseas_broker.get_overseas_price = AsyncMock(
            return_value={"output": {"last": str(sell_price), "rate": "1.5", "tvol": "5000000"}}
        )
        overseas_broker.get_overseas_balance = AsyncMock(
            return_value={
                "output1": [
                    {
                        "ovrs_pdno": "AAPL",
                        "ovrs_cblc_qty": "5",
                        "pchs_avg_pric": "170.0",
                        "evlu_pfls_rt": "7.35",
                    }
                ],
                "output2": [
                    {
                        "frcr_evlu_tota": "100000.00",
                        "frcr_dncl_amt_2": "50000.00",
                        "frcr_buy_amt_smtl": "50000.00",
                    }
                ],
            }
        )
        overseas_broker.send_overseas_order = AsyncMock(
            return_value={"rt_cd": "0", "msg1": "OK"}
        )

        sell_engine = MagicMock(spec=ScenarioEngine)
        sell_engine.evaluate = MagicMock(return_value=_make_sell_match("AAPL"))
        mock_telegram.notify_trade_execution = AsyncMock()

        with patch("src.main.log_trade"), patch("src.main.get_open_position") as mock_pos:
            mock_pos.return_value = {"quantity": 5, "stock_code": "AAPL", "price": 170.0}
            await trading_cycle(
                broker=mock_domestic_broker,
                overseas_broker=overseas_broker,
                scenario_engine=sell_engine,
                playbook=mock_playbook,
                risk=mock_risk,
                db_conn=mock_db,
                decision_logger=mock_decision_logger,
                context_store=mock_context_store,
                criticality_assessor=mock_criticality_assessor,
                telegram=mock_telegram,
                market=mock_overseas_market,
                stock_code="AAPL",
                scan_candidates={},
            )

        overseas_broker.send_overseas_order.assert_called_once()
        call_kwargs = overseas_broker.send_overseas_order.call_args
        sent_price = call_kwargs[1].get("price") or call_kwargs[0][4]
        expected_price = round(sell_price * 0.998, 4)  # -0.2% for SELL limit orders
        assert sent_price == expected_price, (
            f"Expected SELL limit price {expected_price} (182.5 * 0.998) but got {sent_price}. "
            "SELL uses -0.2% to ensure fill even when price dips slightly (#211)."
        )
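The BUY and SELL tests above pin the issue #211 pricing rule: a BUY limit is placed 0.2% above the last price and a SELL limit 0.2% below it. A hedged sketch of that calculation, with a helper name that is assumed rather than taken from src.main:

def overseas_limit_price(last_price: float, action: str) -> float:
    """Hypothetical helper: ±0.2% limit price so orders fill without large slippage."""
    premium = 1.002 if action == "BUY" else 0.998  # BUY above, SELL below the last price
    return round(last_price * premium, 4)

# overseas_limit_price(182.5, "BUY")  -> 182.865
# overseas_limit_price(182.5, "SELL") -> 182.135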
@@ -3001,3 +3084,791 @@ async def test_buy_proceeds_when_no_open_position() -> None:

    # No open position, so the overseas order should be executed
    overseas_broker.send_overseas_order.assert_called_once()


class TestOverseasBrokerIntegration:
    """Test overseas broker live-balance gating for double-buy prevention.

    Issue #195: KIS VTS SELL limit orders are accepted (rt_cd=0) immediately
    but may not fill until the market price reaches the limit. During this window,
    the DB records the position as closed, causing the next cycle to BUY again.
    These tests verify that live broker balance is used as the authoritative source.
    """

    @pytest.mark.asyncio
    async def test_overseas_buy_suppressed_by_broker_balance_when_db_shows_closed(
        self,
    ) -> None:
        """BUY must be suppressed when broker still holds shares even if DB says closed.

        Scenario: SELL limit order was accepted (DB shows closed), but hasn't
        filled yet — broker balance still shows 10 AAPL shares.
        Expected: send_overseas_order is NOT called.
        """
        db_conn = init_db(":memory:")
        # DB: BUY then SELL recorded → get_open_position returns None (closed)
        log_trade(
            conn=db_conn,
            stock_code="AAPL",
            action="BUY",
            confidence=90,
            rationale="entry",
            quantity=10,
            price=180.0,
            market="US_NASDAQ",
            exchange_code="NASD",
        )
        log_trade(
            conn=db_conn,
            stock_code="AAPL",
            action="SELL",
            confidence=90,
            rationale="sell order accepted",
            quantity=10,
            price=182.0,
            market="US_NASDAQ",
            exchange_code="NASD",
        )

        overseas_broker = MagicMock()
        overseas_broker.get_overseas_price = AsyncMock(
            return_value={"output": {"last": "182.50"}}
        )
        # Broker: still holds 10 AAPL shares (SELL not yet filled)
        overseas_broker.get_overseas_balance = AsyncMock(
            return_value={
                "output1": [{"ovrs_pdno": "AAPL", "ovrs_cblc_qty": "10"}],
                "output2": [
                    {
                        "frcr_dncl_amt_2": "50000.00",
                        "frcr_evlu_tota": "60000.00",
                        "frcr_buy_amt_smtl": "50000.00",
                    }
                ],
            }
        )
        overseas_broker.send_overseas_order = AsyncMock(return_value={"msg1": "주문접수"})

        engine = MagicMock(spec=ScenarioEngine)
        engine.evaluate = MagicMock(return_value=_make_buy_match("AAPL"))

        market = MagicMock()
        market.name = "NASDAQ"
        market.code = "US_NASDAQ"
        market.exchange_code = "NASD"
        market.is_domestic = False

        telegram = MagicMock()
        telegram.notify_trade_execution = AsyncMock()
        telegram.notify_fat_finger = AsyncMock()
        telegram.notify_circuit_breaker = AsyncMock()
        telegram.notify_scenario_matched = AsyncMock()

        decision_logger = MagicMock()
        decision_logger.log_decision = MagicMock(return_value="decision-id")

        await trading_cycle(
            broker=MagicMock(),
            overseas_broker=overseas_broker,
            scenario_engine=engine,
            playbook=_make_playbook(market="US"),
            risk=MagicMock(),
            db_conn=db_conn,
            decision_logger=decision_logger,
            context_store=MagicMock(
                get_latest_timeframe=MagicMock(return_value=None),
                set_context=MagicMock(),
            ),
            criticality_assessor=MagicMock(
                assess_market_conditions=MagicMock(return_value=MagicMock(value="NORMAL")),
                get_timeout=MagicMock(return_value=5.0),
            ),
            telegram=telegram,
            market=market,
            stock_code="AAPL",
            scan_candidates={},
        )

        # Broker balance still holds the stock, so the BUY order must be suppressed (double-buy prevention)
        overseas_broker.send_overseas_order.assert_not_called()

    @pytest.mark.asyncio
    async def test_overseas_buy_proceeds_when_broker_shows_no_holding(
        self,
    ) -> None:
        """BUY must proceed when both DB and broker confirm no existing holding.

        Scenario: No prior trades in DB and broker balance shows no AAPL.
        Expected: send_overseas_order IS called (normal buy flow).
        """
        db_conn = init_db(":memory:")
        # DB: no records (new position)

        overseas_broker = MagicMock()
        overseas_broker.get_overseas_price = AsyncMock(
            return_value={"output": {"last": "182.50"}}
        )
        # Broker: no AAPL holding
        overseas_broker.get_overseas_balance = AsyncMock(
            return_value={
                "output1": [],
                "output2": [
                    {
                        "frcr_dncl_amt_2": "50000.00",
                        "frcr_evlu_tota": "50000.00",
                        "frcr_buy_amt_smtl": "0.00",
                    }
                ],
            }
        )
        overseas_broker.send_overseas_order = AsyncMock(return_value={"msg1": "주문접수"})

        engine = MagicMock(spec=ScenarioEngine)
        engine.evaluate = MagicMock(return_value=_make_buy_match("AAPL"))

        market = MagicMock()
        market.name = "NASDAQ"
        market.code = "US_NASDAQ"
        market.exchange_code = "NASD"
        market.is_domestic = False

        telegram = MagicMock()
        telegram.notify_trade_execution = AsyncMock()
        telegram.notify_fat_finger = AsyncMock()
        telegram.notify_circuit_breaker = AsyncMock()
        telegram.notify_scenario_matched = AsyncMock()

        decision_logger = MagicMock()
        decision_logger.log_decision = MagicMock(return_value="decision-id")

        with patch("src.main.log_trade"):
            await trading_cycle(
                broker=MagicMock(),
                overseas_broker=overseas_broker,
                scenario_engine=engine,
                playbook=_make_playbook(market="US"),
                risk=MagicMock(),
                db_conn=db_conn,
                decision_logger=decision_logger,
                context_store=MagicMock(
                    get_latest_timeframe=MagicMock(return_value=None),
                    set_context=MagicMock(),
                ),
                criticality_assessor=MagicMock(
                    assess_market_conditions=MagicMock(return_value=MagicMock(value="NORMAL")),
                    get_timeout=MagicMock(return_value=5.0),
                ),
                telegram=telegram,
                market=market,
                stock_code="AAPL",
                scan_candidates={},
            )

        # Neither the DB nor the broker holds the stock → the BUY order must be executed (regression test)
        overseas_broker.send_overseas_order.assert_called_once()


# ---------------------------------------------------------------------------
# _retry_connection — unit tests (issue #209)
# ---------------------------------------------------------------------------


class TestRetryConnection:
    """Unit tests for the _retry_connection helper (issue #209)."""

    @pytest.mark.asyncio
    async def test_success_on_first_attempt(self) -> None:
        """Returns the result immediately when the first call succeeds."""
        async def ok() -> str:
            return "data"

        result = await _retry_connection(ok, label="test")
        assert result == "data"

    @pytest.mark.asyncio
    async def test_succeeds_after_one_connection_error(self) -> None:
        """Retries once on ConnectionError and returns result on 2nd attempt."""
        call_count = 0

        async def flaky() -> str:
            nonlocal call_count
            call_count += 1
            if call_count < 2:
                raise ConnectionError("timeout")
            return "ok"

        with patch("src.main.asyncio.sleep") as mock_sleep:
            mock_sleep.return_value = None
            result = await _retry_connection(flaky, label="flaky")

        assert result == "ok"
        assert call_count == 2
        mock_sleep.assert_called_once()

    @pytest.mark.asyncio
    async def test_raises_after_all_retries_exhausted(self) -> None:
        """Raises ConnectionError after MAX_CONNECTION_RETRIES attempts."""
        from src.main import MAX_CONNECTION_RETRIES

        call_count = 0

        async def always_fail() -> None:
            nonlocal call_count
            call_count += 1
            raise ConnectionError("unreachable")

        with patch("src.main.asyncio.sleep") as mock_sleep:
            mock_sleep.return_value = None
            with pytest.raises(ConnectionError, match="unreachable"):
                await _retry_connection(always_fail, label="always_fail")

        assert call_count == MAX_CONNECTION_RETRIES

    @pytest.mark.asyncio
    async def test_passes_args_and_kwargs_to_factory(self) -> None:
        """Forwards positional and keyword arguments to the callable."""
        received: dict = {}

        async def capture(a: int, b: int, *, key: str) -> str:
            received["a"] = a
            received["b"] = b
            received["key"] = key
            return "captured"

        result = await _retry_connection(capture, 1, 2, key="val", label="test")
        assert result == "captured"
        assert received == {"a": 1, "b": 2, "key": "val"}

    @pytest.mark.asyncio
    async def test_non_connection_error_not_retried(self) -> None:
        """Non-ConnectionError exceptions propagate immediately without retry."""
        call_count = 0

        async def bad_input() -> None:
            nonlocal call_count
            call_count += 1
            raise ValueError("bad data")

        with pytest.raises(ValueError, match="bad data"):
            await _retry_connection(bad_input, label="bad")

        assert call_count == 1  # No retry for non-ConnectionError

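Taken together, the TestRetryConnection cases describe the helper's contract: retry only on ConnectionError, make at most MAX_CONNECTION_RETRIES attempts, sleep between attempts, forward args/kwargs, and let other exceptions propagate. A sketch that satisfies that contract (not necessarily the code in src.main; the retry count and delay are assumed values):

import asyncio
from typing import Any, Awaitable, Callable

MAX_CONNECTION_RETRIES = 3
RETRY_DELAY_SECONDS = 2.0

async def _retry_connection(
    fn: Callable[..., Awaitable[Any]], *args: Any, label: str = "", **kwargs: Any
) -> Any:
    """Retry an async call on ConnectionError; other exceptions propagate immediately."""
    last_error: ConnectionError | None = None
    for attempt in range(1, MAX_CONNECTION_RETRIES + 1):
        try:
            return await fn(*args, **kwargs)
        except ConnectionError as exc:
            last_error = exc
            if attempt < MAX_CONNECTION_RETRIES:
                # back off before the next attempt (the tests patch asyncio.sleep)
                await asyncio.sleep(RETRY_DELAY_SECONDS * attempt)
    raise last_error  # all attempts exhausted
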
# ---------------------------------------------------------------------------
# run_daily_session — daily CB baseline (daily_start_eval) tests (issue #207)
# ---------------------------------------------------------------------------


class TestDailyCBBaseline:
    """Tests for run_daily_session's daily_start_eval (CB baseline) behaviour.

    Issue #207: CB P&L should be computed relative to the portfolio value at
    the start of each trading day, not the cumulative purchase_total.
    """

    def _make_settings(self) -> Settings:
        return Settings(
            KIS_APP_KEY="test-key",
            KIS_APP_SECRET="test-secret",
            KIS_ACCOUNT_NO="12345678-01",
            GEMINI_API_KEY="test-gemini",
            MODE="paper",
            PAPER_OVERSEAS_CASH=0,
        )

    def _make_domestic_balance(
        self, tot_evlu_amt: float = 0.0, dnca_tot_amt: float = 50000.0
    ) -> dict:
        return {
            "output1": [],
            "output2": [
                {
                    "tot_evlu_amt": str(tot_evlu_amt),
                    "dnca_tot_amt": str(dnca_tot_amt),
                    "pchs_amt_smtl_amt": "40000.0",
                }
            ],
        }

    @pytest.mark.asyncio
    async def test_returns_daily_start_eval_when_no_markets_open(self) -> None:
        """run_daily_session returns the unchanged daily_start_eval when no markets are open."""
        with patch("src.main.get_open_markets", return_value=[]):
            result = await run_daily_session(
                broker=MagicMock(),
                overseas_broker=MagicMock(),
                scenario_engine=MagicMock(),
                playbook_store=MagicMock(),
                pre_market_planner=MagicMock(),
                risk=MagicMock(),
                db_conn=init_db(":memory:"),
                decision_logger=MagicMock(),
                context_store=MagicMock(),
                criticality_assessor=MagicMock(),
                telegram=MagicMock(),
                settings=self._make_settings(),
                smart_scanner=None,
                daily_start_eval=12345.0,
            )
        assert result == 12345.0

    @pytest.mark.asyncio
    async def test_returns_zero_when_no_markets_and_no_baseline(self) -> None:
        """run_daily_session returns 0.0 when no markets are open and daily_start_eval=0."""
        with patch("src.main.get_open_markets", return_value=[]):
            result = await run_daily_session(
                broker=MagicMock(),
                overseas_broker=MagicMock(),
                scenario_engine=MagicMock(),
                playbook_store=MagicMock(),
                pre_market_planner=MagicMock(),
                risk=MagicMock(),
                db_conn=init_db(":memory:"),
                decision_logger=MagicMock(),
                context_store=MagicMock(),
                criticality_assessor=MagicMock(),
                telegram=MagicMock(),
                settings=self._make_settings(),
                smart_scanner=None,
                daily_start_eval=0.0,
            )
        assert result == 0.0

    @pytest.mark.asyncio
    async def test_captures_total_eval_as_baseline_on_first_session(self) -> None:
        """When daily_start_eval=0 and balance returns a positive total_eval, the returned
        value equals total_eval (the captured baseline for the day)."""
        from src.analysis.smart_scanner import ScanCandidate

        settings = self._make_settings()
        broker = MagicMock()
        # Domestic balance: tot_evlu_amt=55000
        broker.get_balance = AsyncMock(
            return_value=self._make_domestic_balance(tot_evlu_amt=55000.0)
        )
        # Price data for the stock
        broker.get_current_price = AsyncMock(
            return_value=(100.0, 1.5, 100.0)
        )

        market = MagicMock()
        market.name = "KR"
        market.code = "KR"
        market.exchange_code = "KRX"
        market.is_domestic = True
        market.timezone = __import__("zoneinfo").ZoneInfo("Asia/Seoul")

        smart_scanner = MagicMock()
        smart_scanner.scan = AsyncMock(
            return_value=[
                ScanCandidate(
                    stock_code="005930",
                    name="Samsung",
                    price=100.0,
                    volume=1_000_000.0,
                    volume_ratio=2.5,
                    rsi=45.0,
                    signal="momentum",
                    score=80.0,
                )
            ]
        )

        playbook_store = MagicMock()
        playbook_store.load = MagicMock(return_value=_make_playbook("KR"))

        scenario_engine = MagicMock(spec=ScenarioEngine)
        scenario_engine.evaluate = MagicMock(return_value=_make_hold_match("005930"))

        risk = MagicMock()
        risk.check_circuit_breaker = MagicMock()
        risk.check_fat_finger = MagicMock()

        telegram = MagicMock()
        telegram.notify_trade_execution = AsyncMock()
        telegram.notify_scenario_matched = AsyncMock()

        decision_logger = MagicMock()
        decision_logger.log_decision = MagicMock(return_value="d1")

        async def _passthrough(fn, *a, label: str = "", **kw):  # type: ignore[override]
            return await fn(*a, **kw)

        with patch("src.main.get_open_markets", return_value=[market]), \
                patch("src.main._retry_connection", new=_passthrough):
            result = await run_daily_session(
                broker=broker,
                overseas_broker=MagicMock(),
                scenario_engine=scenario_engine,
                playbook_store=playbook_store,
                pre_market_planner=MagicMock(),
                risk=risk,
                db_conn=init_db(":memory:"),
                decision_logger=decision_logger,
                context_store=MagicMock(),
                criticality_assessor=MagicMock(),
                telegram=telegram,
                settings=settings,
                smart_scanner=smart_scanner,
                daily_start_eval=0.0,
            )

        assert result == 55000.0  # captured from tot_evlu_amt

    @pytest.mark.asyncio
    async def test_does_not_overwrite_existing_baseline(self) -> None:
        """When daily_start_eval > 0, it must not be overwritten even if balance returns
        a different value (baseline is fixed at the start of each trading day)."""
        from src.analysis.smart_scanner import ScanCandidate

        settings = self._make_settings()
        broker = MagicMock()
        # Balance reports a different eval value (market moved during the day)
        broker.get_balance = AsyncMock(
            return_value=self._make_domestic_balance(tot_evlu_amt=58000.0)
        )
        broker.get_current_price = AsyncMock(return_value=(100.0, 1.5, 100.0))

        market = MagicMock()
        market.name = "KR"
        market.code = "KR"
        market.exchange_code = "KRX"
        market.is_domestic = True
        market.timezone = __import__("zoneinfo").ZoneInfo("Asia/Seoul")

        smart_scanner = MagicMock()
        smart_scanner.scan = AsyncMock(
            return_value=[
                ScanCandidate(
                    stock_code="005930",
                    name="Samsung",
                    price=100.0,
                    volume=1_000_000.0,
                    volume_ratio=2.5,
                    rsi=45.0,
                    signal="momentum",
                    score=80.0,
                )
            ]
        )

        playbook_store = MagicMock()
        playbook_store.load = MagicMock(return_value=_make_playbook("KR"))

        scenario_engine = MagicMock(spec=ScenarioEngine)
        scenario_engine.evaluate = MagicMock(return_value=_make_hold_match("005930"))

        risk = MagicMock()
        risk.check_circuit_breaker = MagicMock()

        telegram = MagicMock()
        telegram.notify_trade_execution = AsyncMock()
        telegram.notify_scenario_matched = AsyncMock()

        decision_logger = MagicMock()
        decision_logger.log_decision = MagicMock(return_value="d1")

        async def _passthrough(fn, *a, label: str = "", **kw):  # type: ignore[override]
            return await fn(*a, **kw)

        with patch("src.main.get_open_markets", return_value=[market]), \
                patch("src.main._retry_connection", new=_passthrough):
            result = await run_daily_session(
                broker=broker,
                overseas_broker=MagicMock(),
                scenario_engine=scenario_engine,
                playbook_store=playbook_store,
                pre_market_planner=MagicMock(),
                risk=risk,
                db_conn=init_db(":memory:"),
                decision_logger=decision_logger,
                context_store=MagicMock(),
                criticality_assessor=MagicMock(),
                telegram=telegram,
                settings=settings,
                smart_scanner=smart_scanner,
                daily_start_eval=55000.0,  # existing baseline
            )

        # Must return the original baseline, NOT the new total_eval (58000)
        assert result == 55000.0

# ---------------------------------------------------------------------------
# sync_positions_from_broker — startup DB sync tests (issue #206)
# ---------------------------------------------------------------------------


class TestSyncPositionsFromBroker:
    """Tests for sync_positions_from_broker() startup position sync (issue #206).

    The function queries broker balances at startup and inserts synthetic BUY
    records for any holdings that the local DB is unaware of, preventing
    double-buy when positions were opened in a previous session or manually.
    """

    def _make_settings(self, enabled_markets: str = "KR") -> Settings:
        return Settings(
            KIS_APP_KEY="k",
            KIS_APP_SECRET="s",
            KIS_ACCOUNT_NO="12345678-01",
            GEMINI_API_KEY="g",
            ENABLED_MARKETS=enabled_markets,
            MODE="paper",
        )

    def _domestic_balance(
        self,
        stock_code: str = "005930",
        qty: int = 5,
    ) -> dict:
        return {
            "output1": [{"pdno": stock_code, "ord_psbl_qty": str(qty)}],
            "output2": [
                {
                    "tot_evlu_amt": "1000000",
                    "dnca_tot_amt": "500000",
                    "pchs_amt_smtl_amt": "500000",
                }
            ],
        }

    def _overseas_balance(
        self,
        stock_code: str = "AAPL",
        qty: int = 10,
    ) -> dict:
        return {
            "output1": [{"ovrs_pdno": stock_code, "ovrs_cblc_qty": str(qty)}],
            "output2": [
                {
                    "frcr_evlu_tota": "50000",
                    "frcr_dncl_amt_2": "10000",
                    "frcr_buy_amt_smtl": "40000",
                }
            ],
        }

    @pytest.mark.asyncio
    async def test_syncs_domestic_position_not_in_db(self) -> None:
        """A domestic holding found in broker but absent from DB is inserted."""
        settings = self._make_settings("KR")
        db_conn = init_db(":memory:")

        broker = MagicMock()
        broker.get_balance = AsyncMock(
            return_value=self._domestic_balance("005930", qty=7)
        )
        overseas_broker = MagicMock()

        synced = await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        assert synced == 1
        from src.db import get_open_position
        pos = get_open_position(db_conn, "005930", "KR")
        assert pos is not None
        assert pos["quantity"] == 7

    @pytest.mark.asyncio
    async def test_skips_position_already_in_db(self) -> None:
        """No duplicate record is created when the position already exists in DB."""
        settings = self._make_settings("KR")
        db_conn = init_db(":memory:")
        # Pre-insert a BUY record
        log_trade(
            conn=db_conn,
            stock_code="005930",
            action="BUY",
            confidence=85,
            rationale="existing position",
            quantity=5,
            price=70000.0,
            market="KR",
            exchange_code="KRX",
        )

        broker = MagicMock()
        broker.get_balance = AsyncMock(
            return_value=self._domestic_balance("005930", qty=5)
        )
        overseas_broker = MagicMock()

        synced = await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        assert synced == 0

    @pytest.mark.asyncio
    async def test_syncs_overseas_position_not_in_db(self) -> None:
        """An overseas holding found in broker but absent from DB is inserted."""
        settings = self._make_settings("US_NASDAQ")
        db_conn = init_db(":memory:")

        broker = MagicMock()
        overseas_broker = MagicMock()
        overseas_broker.get_overseas_balance = AsyncMock(
            return_value=self._overseas_balance("AAPL", qty=10)
        )

        synced = await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        assert synced == 1
        from src.db import get_open_position
        pos = get_open_position(db_conn, "AAPL", "US_NASDAQ")
        assert pos is not None
        assert pos["quantity"] == 10

    @pytest.mark.asyncio
    async def test_returns_zero_when_broker_has_no_holdings(self) -> None:
        """Returns 0 when broker reports empty holdings."""
        settings = self._make_settings("KR")
        db_conn = init_db(":memory:")

        broker = MagicMock()
        broker.get_balance = AsyncMock(
            return_value={"output1": [], "output2": [{}]}
        )
        overseas_broker = MagicMock()

        synced = await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        assert synced == 0

    @pytest.mark.asyncio
    async def test_handles_connection_error_gracefully(self) -> None:
        """ConnectionError during balance fetch is logged but does not raise."""
        settings = self._make_settings("KR")
        db_conn = init_db(":memory:")

        broker = MagicMock()
        broker.get_balance = AsyncMock(
            side_effect=ConnectionError("KIS unreachable")
        )
        overseas_broker = MagicMock()

        synced = await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        assert synced == 0  # Failure treated as no-op

    @pytest.mark.asyncio
    async def test_deduplicates_exchange_codes_for_overseas(self) -> None:
        """Each exchange code is queried at most once even if multiple market
        codes share the same exchange (defensive deduplication)."""
        # Use two DIFFERENT overseas markets (NASD vs NYSE) to verify each is
        # queried separately.
        settings = self._make_settings("US_NASDAQ,US_NYSE")
        db_conn = init_db(":memory:")

        broker = MagicMock()
        overseas_broker = MagicMock()
        overseas_broker.get_overseas_balance = AsyncMock(
            return_value={"output1": [], "output2": [{}]}
        )

        await sync_positions_from_broker(
            broker, overseas_broker, db_conn, settings
        )

        # Two distinct exchange codes (NASD, NYSE) → 2 calls
        assert overseas_broker.get_overseas_balance.call_count == 2


# ---------------------------------------------------------------------------
# Domestic BUY double-prevention (issue #206) — trading_cycle integration
# ---------------------------------------------------------------------------


class TestDomesticBuyDoublePreventionTradingCycle:
    """Verify domestic BUY suppression using broker balance in trading_cycle.

    Issue #206: the broker-balance check was overseas-only; domestic stocks
    were not protected against double-buy caused by untracked positions.
    """

    @pytest.mark.asyncio
    async def test_domestic_buy_suppressed_when_broker_holds_stock(
        self,
    ) -> None:
        """BUY for a domestic stock must be suppressed when broker holds it,
        even if the DB shows no open position."""
        db_conn = init_db(":memory:")
        # DB: no open position for 005930

        broker = MagicMock()
        broker.get_current_price = AsyncMock(return_value=(70000.0, 1.0, 0.0))
        # Broker balance: holds 5 shares of 005930
        broker.get_balance = AsyncMock(
            return_value={
                "output1": [{"pdno": "005930", "ord_psbl_qty": "5"}],
                "output2": [
                    {
                        "tot_evlu_amt": "1000000",
                        "dnca_tot_amt": "500000",
                        "pchs_amt_smtl_amt": "500000",
                    }
                ],
            }
        )
        broker.send_order = AsyncMock(return_value={"msg1": "주문접수"})

        market = MagicMock()
        market.name = "KR"
        market.code = "KR"
        market.exchange_code = "KRX"
        market.is_domestic = True

        engine = MagicMock(spec=ScenarioEngine)
        engine.evaluate = MagicMock(return_value=_make_buy_match("005930"))

        telegram = MagicMock()
        telegram.notify_trade_execution = AsyncMock()
        telegram.notify_fat_finger = AsyncMock()
        telegram.notify_circuit_breaker = AsyncMock()
        telegram.notify_scenario_matched = AsyncMock()

        decision_logger = MagicMock()
        decision_logger.log_decision = MagicMock(return_value="d1")

        settings = Settings(
            KIS_APP_KEY="k",
            KIS_APP_SECRET="s",
            KIS_ACCOUNT_NO="12345678-01",
            GEMINI_API_KEY="g",
            MODE="paper",
        )

        await trading_cycle(
            broker=broker,
            overseas_broker=MagicMock(),
            scenario_engine=engine,
            playbook=_make_playbook(market="KR"),
            risk=MagicMock(),
            db_conn=db_conn,
            decision_logger=decision_logger,
            context_store=MagicMock(
                get_latest_timeframe=MagicMock(return_value=None),
                set_context=MagicMock(),
            ),
            criticality_assessor=MagicMock(
                assess_market_conditions=MagicMock(return_value=MagicMock(value="NORMAL")),
                get_timeout=MagicMock(return_value=5.0),
            ),
            telegram=telegram,
            settings=settings,
            market=market,
            stock_code="005930",
            scan_candidates={"KR": {}},
        )

        # BUY must NOT have been executed because broker still holds the stock
        broker.send_order.assert_not_called()
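The double-buy tests above rely on helpers such as _extract_held_codes_from_balance (imported from src.main earlier in this diff). Their implementation is not shown here; the following is a plausible sketch of how held codes could be pulled from either a domestic (pdno/ord_psbl_qty) or an overseas (ovrs_pdno/ovrs_cblc_qty) KIS balance payload, written only to illustrate the gating idea.

def _extract_held_codes_from_balance(balance: dict) -> set[str]:
    """Sketch: collect stock codes with a positive quantity from a KIS balance payload."""
    held: set[str] = set()
    for row in balance.get("output1", []):
        code = row.get("pdno") or row.get("ovrs_pdno")
        qty = row.get("ord_psbl_qty") or row.get("ovrs_cblc_qty") or "0"
        try:
            if code and int(float(qty)) > 0:
                held.add(code)
        except ValueError:
            continue
    return held

# Conceptual BUY gate used by trading_cycle: skip the order when the broker balance
# still reports the stock, even if the local DB shows the position as closed.
# if stock_code in _extract_held_codes_from_balance(balance): return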
@@ -640,4 +640,176 @@ class TestPaperOverseasCash:
|
|||||||
GEMINI_API_KEY="g",
|
GEMINI_API_KEY="g",
|
||||||
)
|
)
|
||||||
assert settings.PAPER_OVERSEAS_CASH == 0.0
|
assert settings.PAPER_OVERSEAS_CASH == 0.0
|
||||||
del os.environ["PAPER_OVERSEAS_CASH"]
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TR_ID live/paper branching — overseas (issues #201, #203)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _make_overseas_broker_with_mode(mode: str) -> OverseasBroker:
|
||||||
|
s = Settings(
|
||||||
|
KIS_APP_KEY="k",
|
||||||
|
KIS_APP_SECRET="s",
|
||||||
|
KIS_ACCOUNT_NO="12345678-01",
|
||||||
|
GEMINI_API_KEY="g",
|
||||||
|
DB_PATH=":memory:",
|
||||||
|
MODE=mode,
|
||||||
|
)
|
||||||
|
kis = KISBroker(s)
|
||||||
|
kis._access_token = "tok"
|
||||||
|
kis._token_expires_at = float("inf")
|
||||||
|
kis._rate_limiter.acquire = AsyncMock()
|
||||||
|
return OverseasBroker(kis)
|
||||||
|
|
||||||
|
|
||||||
|
class TestOverseasTRIDBranching:
|
||||||
|
"""get_overseas_balance and send_overseas_order must use correct TR_ID."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_get_overseas_balance_paper_uses_vtts3012r(self) -> None:
|
||||||
|
broker = _make_overseas_broker_with_mode("paper")
|
||||||
|
captured: list[str] = []
|
||||||
|
|
||||||
|
async def mock_auth_headers(tr_id: str) -> dict:
|
||||||
|
captured.append(tr_id)
|
||||||
|
return {"tr_id": tr_id, "authorization": "Bearer tok"}
|
||||||
|
|
||||||
|
broker._broker._auth_headers = mock_auth_headers # type: ignore[method-assign]
|
||||||
|
|
||||||
|
mock_resp = AsyncMock()
|
||||||
|
mock_resp.status = 200
|
||||||
|
mock_resp.json = AsyncMock(return_value={"output1": [], "output2": []})
|
||||||
|
mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
|
||||||
|
mock_resp.__aexit__ = AsyncMock(return_value=False)
|
||||||
|
|
||||||
|
mock_session = MagicMock()
|
||||||
|
mock_session.get = MagicMock(return_value=mock_resp)
|
||||||
|
broker._broker._get_session = MagicMock(return_value=mock_session)
|
||||||
|
|
||||||
|
await broker.get_overseas_balance("NASD")
|
||||||
|
assert "VTTS3012R" in captured
|
||||||
|
|
||||||
|
    @pytest.mark.asyncio
    async def test_get_overseas_balance_live_uses_ttts3012r(self) -> None:
        broker = _make_overseas_broker_with_mode("live")
        captured: list[str] = []

        async def mock_auth_headers(tr_id: str) -> dict:
            captured.append(tr_id)
            return {"tr_id": tr_id, "authorization": "Bearer tok"}

        broker._broker._auth_headers = mock_auth_headers  # type: ignore[method-assign]

        mock_resp = AsyncMock()
        mock_resp.status = 200
        mock_resp.json = AsyncMock(return_value={"output1": [], "output2": []})
        mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
        mock_resp.__aexit__ = AsyncMock(return_value=False)

        mock_session = MagicMock()
        mock_session.get = MagicMock(return_value=mock_resp)
        broker._broker._get_session = MagicMock(return_value=mock_session)

        await broker.get_overseas_balance("NASD")
        assert "TTTS3012R" in captured

    @pytest.mark.asyncio
    async def test_send_overseas_order_buy_paper_uses_vttt1002u(self) -> None:
        broker = _make_overseas_broker_with_mode("paper")
        captured: list[str] = []

        async def mock_auth_headers(tr_id: str) -> dict:
            captured.append(tr_id)
            return {"tr_id": tr_id, "authorization": "Bearer tok"}

        broker._broker._auth_headers = mock_auth_headers  # type: ignore[method-assign]
        broker._broker._get_hash_key = AsyncMock(return_value="h")  # type: ignore[method-assign]

        mock_resp = AsyncMock()
        mock_resp.status = 200
        mock_resp.json = AsyncMock(return_value={"rt_cd": "0", "msg1": "OK"})
        mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
        mock_resp.__aexit__ = AsyncMock(return_value=False)

        mock_session = MagicMock()
        mock_session.post = MagicMock(return_value=mock_resp)
        broker._broker._get_session = MagicMock(return_value=mock_session)

        await broker.send_overseas_order("NASD", "AAPL", "BUY", 1)
        assert "VTTT1002U" in captured

    @pytest.mark.asyncio
    async def test_send_overseas_order_buy_live_uses_tttt1002u(self) -> None:
        broker = _make_overseas_broker_with_mode("live")
        captured: list[str] = []

        async def mock_auth_headers(tr_id: str) -> dict:
            captured.append(tr_id)
            return {"tr_id": tr_id, "authorization": "Bearer tok"}

        broker._broker._auth_headers = mock_auth_headers  # type: ignore[method-assign]
        broker._broker._get_hash_key = AsyncMock(return_value="h")  # type: ignore[method-assign]

        mock_resp = AsyncMock()
        mock_resp.status = 200
        mock_resp.json = AsyncMock(return_value={"rt_cd": "0", "msg1": "OK"})
        mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
        mock_resp.__aexit__ = AsyncMock(return_value=False)

        mock_session = MagicMock()
        mock_session.post = MagicMock(return_value=mock_resp)
        broker._broker._get_session = MagicMock(return_value=mock_session)

        await broker.send_overseas_order("NASD", "AAPL", "BUY", 1)
        assert "TTTT1002U" in captured

    @pytest.mark.asyncio
    async def test_send_overseas_order_sell_paper_uses_vttt1001u(self) -> None:
        broker = _make_overseas_broker_with_mode("paper")
        captured: list[str] = []

        async def mock_auth_headers(tr_id: str) -> dict:
            captured.append(tr_id)
            return {"tr_id": tr_id, "authorization": "Bearer tok"}

        broker._broker._auth_headers = mock_auth_headers  # type: ignore[method-assign]
        broker._broker._get_hash_key = AsyncMock(return_value="h")  # type: ignore[method-assign]

        mock_resp = AsyncMock()
        mock_resp.status = 200
        mock_resp.json = AsyncMock(return_value={"rt_cd": "0", "msg1": "OK"})
        mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
        mock_resp.__aexit__ = AsyncMock(return_value=False)

        mock_session = MagicMock()
        mock_session.post = MagicMock(return_value=mock_resp)
        broker._broker._get_session = MagicMock(return_value=mock_session)

        await broker.send_overseas_order("NASD", "AAPL", "SELL", 1)
        assert "VTTT1001U" in captured

    @pytest.mark.asyncio
    async def test_send_overseas_order_sell_live_uses_tttt1006u(self) -> None:
        broker = _make_overseas_broker_with_mode("live")
        captured: list[str] = []

        async def mock_auth_headers(tr_id: str) -> dict:
            captured.append(tr_id)
            return {"tr_id": tr_id, "authorization": "Bearer tok"}

        broker._broker._auth_headers = mock_auth_headers  # type: ignore[method-assign]
        broker._broker._get_hash_key = AsyncMock(return_value="h")  # type: ignore[method-assign]

        mock_resp = AsyncMock()
        mock_resp.status = 200
        mock_resp.json = AsyncMock(return_value={"rt_cd": "0", "msg1": "OK"})
        mock_resp.__aenter__ = AsyncMock(return_value=mock_resp)
        mock_resp.__aexit__ = AsyncMock(return_value=False)

        mock_session = MagicMock()
        mock_session.post = MagicMock(return_value=mock_resp)
        broker._broker._get_session = MagicMock(return_value=mock_session)

        await broker.send_overseas_order("NASD", "AAPL", "SELL", 1)
        assert "TTTT1006U" in captured

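Taken together, these assertions pin down a mode-to-TR_ID table for overseas calls. A minimal sketch of such a lookup follows; the TR_ID strings come straight from the assertions above, while the OVERSEAS_TR_IDS table and the _select_tr_id helper are illustrative assumptions rather than the actual OverseasBroker internals (which this diff does not show):

    # Hypothetical sketch, not the real OverseasBroker code.
    # Only the TR_ID values are taken from the tests above.
    OVERSEAS_TR_IDS: dict[tuple[str, str], str] = {
        ("balance", "paper"): "VTTS3012R",
        ("balance", "live"): "TTTS3012R",
        ("buy", "paper"): "VTTT1002U",
        ("buy", "live"): "TTTT1002U",
        ("sell", "paper"): "VTTT1001U",
        ("sell", "live"): "TTTT1006U",  # live SELL is not a simple V-to-T prefix swap
    }

    def _select_tr_id(operation: str, mode: str) -> str:
        """Look up the KIS TR_ID for an overseas operation in paper or live mode."""
        try:
            return OVERSEAS_TR_IDS[(operation, mode)]
        except KeyError as exc:
            raise ValueError(f"unsupported operation/mode: {operation}/{mode}") from exc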

32	tests/test_strategies_base.py	Normal file
@@ -0,0 +1,32 @@
"""Tests for BaseStrategy abstract class."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from src.strategies.base import BaseStrategy
|
||||||
|
|
||||||
|
|
||||||
|
class ConcreteStrategy(BaseStrategy):
|
||||||
|
"""Minimal concrete strategy for testing."""
|
||||||
|
|
||||||
|
def evaluate(self, market_data: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
return {"action": "HOLD", "confidence": 50, "rationale": "test"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_base_strategy_cannot_be_instantiated() -> None:
|
||||||
|
"""BaseStrategy cannot be instantiated directly (it's abstract)."""
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
BaseStrategy() # type: ignore[abstract]
|
||||||
|
|
||||||
|
|
||||||
|
def test_concrete_strategy_evaluate_returns_decision() -> None:
|
||||||
|
"""Concrete subclass must implement evaluate and return a dict."""
|
||||||
|
strategy = ConcreteStrategy()
|
||||||
|
result = strategy.evaluate({"close": [100.0, 101.0]})
|
||||||
|
assert isinstance(result, dict)
|
||||||
|
assert result["action"] == "HOLD"
|
||||||
|
assert result["confidence"] == 50
|
||||||
|
assert "rationale" in result
|
||||||
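For context, the BaseStrategy these tests target can be pictured as a small abstract base class. This is a sketch inferred from the tests only; src/strategies/base.py itself is not part of this diff, so anything beyond the class name and the evaluate signature is an assumption:

    # Hypothetical sketch of src/strategies/base.py, inferred from the tests above.
    from __future__ import annotations

    from abc import ABC, abstractmethod
    from typing import Any


    class BaseStrategy(ABC):
        """Abstract trading strategy; subclasses must implement evaluate()."""

        @abstractmethod
        def evaluate(self, market_data: dict[str, Any]) -> dict[str, Any]:
            """Return a decision dict with at least action, confidence, and rationale keys."""
            raise NotImplementedError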