# -*- coding: utf-8 -*-

"""Stock-related Gateway handlers extracted from the main Gateway module."""

from __future__ import annotations

import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import Any

from backend.data.provider_utils import normalize_symbol
from backend.domains import news as news_domain
from backend.domains import trading as trading_domain
from backend.enrich.news_enricher import enrich_news_for_symbol
from backend.enrich.llm_enricher import llm_enrichment_enabled
from backend.tools.data_tools import prices_to_df
from shared.client import NewsServiceClient, TradingServiceClient

logger = logging.getLogger(__name__)
async def handle_get_stock_history(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send OHLC price history for a ticker over a clamped lookback window.

    Resolution order: trading service -> local market store -> provider
    fallback (which also backfills the local store).  Responds with a
    ``stock_history_loaded`` message capped at the last 120 bars.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_history_loaded",
            "ticker": "",
            "prices": [],
            "source": None,
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    # Clamp the lookback to [7, 365] days; fall back to the default on bad input.
    lookback_days = data.get("lookback_days", 90)
    try:
        lookback_days = max(7, min(int(lookback_days), 365))
    except (TypeError, ValueError):
        lookback_days = 90

    # Anchor the window on the simulation's current date when available.
    end_date = gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        end_dt = datetime.now()
        end_date = end_dt.strftime("%Y-%m-%d")
    start_date = (end_dt - timedelta(days=lookback_days)).strftime("%Y-%m-%d")

    prices = []
    source = None
    response = await gateway._call_trading_service(
        "get_prices for history",
        lambda client: client.get_prices(ticker=ticker, start_date=start_date, end_date=end_date),
    )
    if response is not None:
        prices = response.prices
        source = "trading_service"

    if not prices:
        prices = await asyncio.to_thread(gateway.storage.market_store.get_ohlc, ticker, start_date, end_date)
        if prices:
            # BUG FIX: previously `source` stayed at the "polygon" default even
            # though the data came from the local cache; label it consistently
            # with the other handlers in this module.
            source = "market_store"
        else:
            payload = await asyncio.to_thread(
                trading_domain.get_prices_payload,
                ticker=ticker,
                start_date=start_date,
                end_date=end_date,
            )
            prices = payload.get("prices") or []
            usage_snapshot = gateway._provider_router.get_usage_snapshot()
            source = usage_snapshot.get("last_success", {}).get("prices")
            if prices:
                # Backfill the local store so later requests hit the cache.
                await asyncio.to_thread(
                    gateway.storage.market_store.upsert_ohlc,
                    ticker,
                    [price.model_dump() for price in prices],
                    source=source or "provider",
                )

    await websocket.send(json.dumps({
        "type": "stock_history_loaded",
        "ticker": ticker,
        # Rows may be dicts (market store) or pydantic models (service/provider).
        "prices": [price if isinstance(price, dict) else price.model_dump() for price in prices][-120:],
        "source": source,
        "start_date": start_date,
        "end_date": end_date,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_explain_events(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send cached explain events, signals, and trades for a ticker.

    Responds with a ``stock_explain_events_loaded`` message built from the
    runtime-DB snapshot.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    # BUG FIX: the runtime-DB read is synchronous; run it off the event loop
    # like every other blocking storage call in this module.
    snapshot = await asyncio.to_thread(gateway.storage.runtime_db.get_stock_explain_snapshot, ticker)
    await websocket.send(json.dumps({
        "type": "stock_explain_events_loaded",
        "ticker": ticker,
        "events": snapshot.get("events", []),
        "signals": snapshot.get("signals", []),
        "trades": snapshot.get("trades", []),
    }, ensure_ascii=False, default=str))
async def handle_get_stock_news(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Deliver recent enriched news for a ticker via ``stock_news_loaded``."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_news_loaded",
            "ticker": "",
            "news": [],
            "source": None,
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    def _clamped_int(raw: Any, lo: int, hi: int, default: int) -> int:
        # Coerce a client-supplied value into [lo, hi]; bad input -> default.
        try:
            return max(lo, min(int(raw), hi))
        except (TypeError, ValueError):
            return default

    lookback_days = _clamped_int(data.get("lookback_days", 30), 7, 180, 30)
    limit = _clamped_int(data.get("limit", 12), 1, 30, 12)

    # Window ends at the simulation's current date (or today on bad input).
    end_date = gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        end_dt = datetime.now()
        end_date = end_dt.strftime("%Y-%m-%d")
    start_date = (end_dt - timedelta(days=lookback_days)).strftime("%Y-%m-%d")

    news_rows = []
    source = "polygon"
    remote = await gateway._call_news_service(
        "get_enriched_news",
        lambda client: client.get_enriched_news(
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            limit=limit,
        ),
    )
    if remote is not None:
        news_rows = remote.get("news") or []
        source = "news_service"

    if not news_rows:
        # Local fallback: over-fetch from the store, keep the newest `limit`.
        local = await asyncio.to_thread(
            news_domain.get_enriched_news,
            gateway.storage.market_store,
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            limit=max(limit, 50),
        )
        news_rows = (local.get("news") or [])[-limit:]
        source = "market_store"

    await websocket.send(json.dumps({
        "type": "stock_news_loaded",
        "ticker": ticker,
        "news": news_rows[-limit:],
        "source": source,
        "start_date": start_date,
        "end_date": end_date,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_news_for_date(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Deliver news for one trading day via ``stock_news_for_date_loaded``."""
    ticker = normalize_symbol(data.get("ticker", ""))
    trade_date = str(data.get("date") or "").strip()
    if not (ticker and trade_date):
        await websocket.send(json.dumps({
            "type": "stock_news_for_date_loaded",
            "ticker": ticker,
            "date": trade_date,
            "news": [],
            "error": "ticker and date are required",
        }, ensure_ascii=False))
        return

    # Row cap clamped to [1, 50]; defaults to 20 on bad input.
    try:
        limit = max(1, min(int(data.get("limit", 20)), 50))
    except (TypeError, ValueError):
        limit = 20

    source = "market_store"
    news_rows = []
    remote = await gateway._call_news_service(
        "get_news_for_date",
        lambda client: client.get_news_for_date(ticker=ticker, date=trade_date, limit=limit),
    )
    if remote is not None:
        news_rows = remote.get("news") or []
        source = "news_service"

    if not news_rows:
        # Service empty or unavailable: read straight from the market store.
        local = await asyncio.to_thread(
            news_domain.get_news_for_date,
            gateway.storage.market_store,
            ticker=ticker,
            date=trade_date,
            limit=limit,
        )
        news_rows = local.get("news") or []
        source = "market_store"

    await websocket.send(json.dumps({
        "type": "stock_news_for_date_loaded",
        "ticker": ticker,
        "date": trade_date,
        "news": news_rows,
        "source": source,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_news_timeline(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send a news timeline for a ticker via ``stock_news_timeline_loaded``."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_news_timeline_loaded",
            "ticker": "",
            "timeline": [],
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    # Lookback clamped to [7, 365] days; defaults to 90 on bad input.
    try:
        lookback_days = max(7, min(int(data.get("lookback_days", 90)), 365))
    except (TypeError, ValueError):
        lookback_days = 90

    end_date = gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        end_dt = datetime.now()
        end_date = end_dt.strftime("%Y-%m-%d")
    start_date = (end_dt - timedelta(days=lookback_days)).strftime("%Y-%m-%d")

    remote = await gateway._call_news_service(
        "get_news_timeline",
        lambda client: client.get_news_timeline(ticker=ticker, start_date=start_date, end_date=end_date),
    )
    timeline = (remote.get("timeline") or []) if remote is not None else []

    if not timeline:
        # Fall back to the locally stored timeline.
        local = await asyncio.to_thread(
            news_domain.get_news_timeline,
            gateway.storage.market_store,
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
        )
        timeline = local.get("timeline") or []

    await websocket.send(json.dumps({
        "type": "stock_news_timeline_loaded",
        "ticker": ticker,
        "timeline": timeline,
        "start_date": start_date,
        "end_date": end_date,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_news_categories(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send a news-category breakdown via ``stock_news_categories_loaded``."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_news_categories_loaded",
            "ticker": "",
            "categories": {},
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    # Lookback clamped to [7, 365] days; defaults to 90 on bad input.
    try:
        lookback_days = max(7, min(int(data.get("lookback_days", 90)), 365))
    except (TypeError, ValueError):
        lookback_days = 90

    end_date = gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        end_dt = datetime.now()
        end_date = end_dt.strftime("%Y-%m-%d")
    start_date = (end_dt - timedelta(days=lookback_days)).strftime("%Y-%m-%d")

    remote = await gateway._call_news_service(
        "get_categories",
        lambda client: client.get_categories(
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            limit=200,
        ),
    )
    categories = (remote.get("categories") or {}) if remote is not None else {}

    if not categories:
        # Fall back to computing categories from the local store.
        local = await asyncio.to_thread(
            news_domain.get_news_categories,
            gateway.storage.market_store,
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            limit=200,
        )
        categories = local.get("categories") or {}

    await websocket.send(json.dumps({
        "type": "stock_news_categories_loaded",
        "ticker": ticker,
        "categories": categories,
        "start_date": start_date,
        "end_date": end_date,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_range_explain(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send an explanation for a price range via ``stock_range_explain_loaded``."""
    ticker = normalize_symbol(data.get("ticker", ""))
    start_date = str(data.get("start_date") or "").strip()
    end_date = str(data.get("end_date") or "").strip()
    if not (ticker and start_date and end_date):
        await websocket.send(json.dumps({
            "type": "stock_range_explain_loaded",
            "ticker": ticker,
            "result": {"error": "ticker, start_date, end_date are required"},
        }, ensure_ascii=False))
        return

    # Only a real list of article ids is forwarded; anything else -> None.
    raw_ids = data.get("article_ids")
    article_ids = raw_ids if isinstance(raw_ids, list) else None

    result = None
    remote = await gateway._call_news_service(
        "get_range_explain",
        lambda client: client.get_range_explain(
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            article_ids=article_ids,
            limit=100,
        ),
    )
    if remote is not None:
        result = remote.get("result")

    if result is None:
        # Local fallback: serve whatever is cached without refreshing it.
        local = await asyncio.to_thread(
            news_domain.get_range_explain_payload,
            gateway.storage.market_store,
            ticker=ticker,
            start_date=start_date,
            end_date=end_date,
            article_ids=article_ids,
            limit=100,
            refresh_if_stale=False,
        )
        result = local.get("result")

    await websocket.send(json.dumps({
        "type": "stock_range_explain_loaded",
        "ticker": ticker,
        "result": result,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_insider_trades(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send recent insider trades for a ticker (``stock_insider_trades_loaded``).

    Tries the trading service first, then the local provider fallback, and
    responds with trades sorted newest-first plus derived ``holding_change``
    and ``is_buy`` fields.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_insider_trades_loaded",
            "ticker": "",
            "trades": [],
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    end_date = str(data.get("end_date") or gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")).strip()[:10]
    start_date = str(data.get("start_date") or "").strip()[:10]
    # BUG FIX: a non-numeric "limit" previously raised out of the handler;
    # parse defensively like the other handlers in this module.
    try:
        limit = int(data.get("limit", 50))
    except (TypeError, ValueError):
        limit = 50

    trades = []
    response = await gateway._call_trading_service(
        "get_insider_trades",
        lambda client: client.get_insider_trades(
            ticker=ticker,
            end_date=end_date,
            start_date=start_date if start_date else None,
            limit=limit,
        ),
    )
    if response is not None:
        trades = response.insider_trades

    if not trades:
        payload = await asyncio.to_thread(
            trading_domain.get_insider_trades_payload,
            ticker=ticker,
            end_date=end_date,
            start_date=start_date if start_date else None,
            limit=limit,
        )
        trades = payload.get("insider_trades") or []

    # Newest first.  NOTE(review): rows are accessed as attribute objects, so
    # both sources are assumed to return the same model shape — confirm.
    sorted_trades = sorted(trades, key=lambda t: t.transaction_date or "", reverse=True)
    formatted_trades = [{
        "ticker": t.ticker,
        "name": t.name,
        "title": t.title,
        "is_board_director": t.is_board_director,
        "transaction_date": t.transaction_date,
        "transaction_shares": t.transaction_shares,
        "transaction_price_per_share": t.transaction_price_per_share,
        "transaction_value": t.transaction_value,
        "shares_owned_before_transaction": t.shares_owned_before_transaction,
        "shares_owned_after_transaction": t.shares_owned_after_transaction,
        "security_title": t.security_title,
        "filing_date": t.filing_date,
        # NOTE(review): a holding of exactly 0 shares is treated as missing by
        # the truthiness check below, yielding None — confirm that is intended.
        "holding_change": (
            (t.shares_owned_after_transaction or 0) - (t.shares_owned_before_transaction or 0)
            if t.shares_owned_after_transaction and t.shares_owned_before_transaction else None
        ),
        "is_buy": ((t.transaction_shares or 0) > 0) if t.transaction_shares is not None else None,
    } for t in sorted_trades]

    await websocket.send(json.dumps({
        "type": "stock_insider_trades_loaded",
        "ticker": ticker,
        "start_date": start_date or None,
        "end_date": end_date,
        "trades": formatted_trades,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_story(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send the ticker's narrative story as of a date (``stock_story_loaded``)."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_story_loaded",
            "ticker": "",
            "story": "",
            "error": "invalid ticker",
        }, ensure_ascii=False))
        return

    # Resolve the as-of date: explicit request -> simulation date -> today.
    raw_date = data.get("as_of_date") or gateway.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    as_of_date = str(raw_date).strip()[:10]

    payload = await gateway._call_news_service(
        "get_story",
        lambda client: client.get_story(ticker=ticker, as_of_date=as_of_date),
    )
    if payload is None:
        # Service unavailable: build the story from the local market store.
        payload = await asyncio.to_thread(
            news_domain.get_story_payload,
            gateway.storage.market_store,
            ticker=ticker,
            as_of_date=as_of_date,
        )

    await websocket.send(json.dumps({
        "type": "stock_story_loaded",
        "ticker": ticker,
        "as_of_date": as_of_date,
        "story": payload.get("story") or "",
        "source": payload.get("source") or "local",
    }, ensure_ascii=False, default=str))
async def handle_get_stock_similar_days(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Send historically similar trading days (``stock_similar_days_loaded``)."""
    ticker = normalize_symbol(data.get("ticker", ""))
    target_date = str(data.get("date") or "").strip()[:10]
    if not (ticker and target_date):
        await websocket.send(json.dumps({
            "type": "stock_similar_days_loaded",
            "ticker": ticker,
            "date": target_date,
            "items": [],
            "error": "ticker and date are required",
        }, ensure_ascii=False))
        return

    # Neighbour count clamped to [1, 20]; defaults to 8 on bad input.
    try:
        top_k = max(1, min(int(data.get("top_k", 8)), 20))
    except (TypeError, ValueError):
        top_k = 8

    payload = await gateway._call_news_service(
        "get_similar_days",
        lambda client: client.get_similar_days(ticker=ticker, date=target_date, n_similar=top_k),
    )
    if payload is None:
        # Service unavailable: compute locally from the market store.
        payload = await asyncio.to_thread(
            news_domain.get_similar_days_payload,
            gateway.storage.market_store,
            ticker=ticker,
            date=target_date,
            n_similar=top_k,
        )

    # The payload's own keys (items, error, ...) are spread into the message.
    await websocket.send(json.dumps({
        "type": "stock_similar_days_loaded",
        "ticker": ticker,
        "date": target_date,
        **payload,
    }, ensure_ascii=False, default=str))
async def handle_get_stock_technical_indicators(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Compute and send a technical-indicator snapshot for a ticker.

    Pulls ~250 days of prices (trading service, else local provider path),
    runs the gateway's technical analyzer, and responds with a
    ``stock_technical_indicators_loaded`` message.  Any failure is reported
    back to the client in the ``error`` field rather than raised.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(json.dumps({
            "type": "stock_technical_indicators_loaded",
            "ticker": ticker,
            "indicators": None,
            "error": "ticker is required",
        }, ensure_ascii=False))
        return

    try:
        end_date = datetime.now()
        start_date = end_date - timedelta(days=250)

        prices = None
        response = await gateway._call_trading_service(
            "get_prices",
            lambda client: client.get_prices(
                ticker=ticker,
                start_date=start_date.strftime("%Y-%m-%d"),
                end_date=end_date.strftime("%Y-%m-%d"),
            ),
        )
        if response is not None:
            prices = response.prices

        if prices is None:
            # BUG FIX: this synchronous provider call previously ran directly
            # on the event loop; wrap it in a thread like the other handlers.
            payload = await asyncio.to_thread(
                trading_domain.get_prices_payload,
                ticker=ticker,
                start_date=start_date.strftime("%Y-%m-%d"),
                end_date=end_date.strftime("%Y-%m-%d"),
            )
            prices = payload.get("prices") or []

        # Most indicators below need at least a 20-bar window.
        if not prices or len(prices) < 20:
            await websocket.send(json.dumps({
                "type": "stock_technical_indicators_loaded",
                "ticker": ticker,
                "indicators": None,
                "error": "Insufficient price data",
            }, ensure_ascii=False))
            return

        df = prices_to_df(prices)
        signal = gateway._technical_analyzer.analyze(ticker, df)

        # Trailing realized volatility, annualized and expressed in percent.
        # (Removed an unused `import pandas as pd` that lived here.)
        df_sorted = df.sort_values("time").reset_index(drop=True)
        df_sorted["returns"] = df_sorted["close"].pct_change()
        vol_10 = float(df_sorted["returns"].tail(10).std() * (252**0.5) * 100) if len(df_sorted) >= 10 else None
        vol_20 = float(df_sorted["returns"].tail(20).std() * (252**0.5) * 100) if len(df_sorted) >= 20 else None
        vol_60 = float(df_sorted["returns"].tail(60).std() * (252**0.5) * 100) if len(df_sorted) >= 60 else None

        # Percent distance of the current price from each moving average.
        ma_distance = {}
        for ma_key in ["ma5", "ma10", "ma20", "ma50", "ma200"]:
            ma_value = getattr(signal, ma_key, None)
            ma_distance[ma_key] = ((signal.current_price - ma_value) / ma_value) * 100 if ma_value and ma_value > 0 else None

        indicators = {
            "ticker": ticker,
            "current_price": signal.current_price,
            "ma": {
                "ma5": signal.ma5,
                "ma10": signal.ma10,
                "ma20": signal.ma20,
                "ma50": signal.ma50,
                "ma200": signal.ma200,
                "distance": ma_distance,
            },
            "rsi": {
                "rsi14": signal.rsi14,
                # Conventional RSI bands: <30 oversold, >70 overbought.
                "status": "oversold" if signal.rsi14 < 30 else "overbought" if signal.rsi14 > 70 else "neutral",
            },
            "macd": {
                "macd": signal.macd,
                "signal": signal.macd_signal,
                "histogram": signal.macd - signal.macd_signal,
            },
            "bollinger": {
                "upper": signal.bollinger_upper,
                "mid": signal.bollinger_mid,
                "lower": signal.bollinger_lower,
            },
            "volatility": {
                "vol_10d": vol_10,
                "vol_20d": vol_20,
                "vol_60d": vol_60,
                "annualized": signal.annualized_volatility_pct,
                "risk_level": signal.risk_level,
            },
            "trend": signal.trend,
            "mean_reversion": signal.mean_reversion_signal,
        }

        await websocket.send(json.dumps({
            "type": "stock_technical_indicators_loaded",
            "ticker": ticker,
            "indicators": indicators,
        }, ensure_ascii=False, default=str))
    except Exception as exc:
        # Boundary handler: log with traceback, then report to the client.
        logger.exception("Error getting technical indicators for %s", ticker)
        await websocket.send(json.dumps({
            "type": "stock_technical_indicators_loaded",
            "ticker": ticker,
            "indicators": None,
            "error": str(exc),
        }, ensure_ascii=False))
async def handle_run_stock_enrich(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
    """Run the news-enrichment pipeline for a ticker over a date range.

    Optionally rebuilds the cached story and similar-days artifacts, then
    reports the outcome via a ``stock_enrich_completed`` message.
    """
    def _date_of(key: str, fallback: str = "") -> str:
        # Normalize an incoming value to a bare YYYY-MM-DD prefix.
        return str(data.get(key) or fallback or "").strip()[:10]

    ticker = normalize_symbol(data.get("ticker", ""))
    start_date = _date_of("start_date")
    end_date = _date_of("end_date")
    story_date = _date_of("story_date", end_date)
    target_date = _date_of("target_date")
    force = bool(data.get("force", False))
    rebuild_story = bool(data.get("rebuild_story", True))
    rebuild_similar_days = bool(data.get("rebuild_similar_days", True))
    only_local_to_llm = bool(data.get("only_local_to_llm", False))

    # Article cap clamped to [10, 500]; defaults to 200 on bad input.
    try:
        limit = max(10, min(int(data.get("limit", 200)), 500))
    except (TypeError, ValueError):
        limit = 200

    base = {
        "type": "stock_enrich_completed",
        "ticker": ticker,
        "start_date": start_date,
        "end_date": end_date,
    }
    if not (ticker and start_date and end_date):
        await websocket.send(json.dumps(
            {**base, "error": "ticker, start_date, end_date are required"},
            ensure_ascii=False,
        ))
        return
    if only_local_to_llm and not llm_enrichment_enabled():
        await websocket.send(json.dumps(
            {**base, "error": "only_local_to_llm requires EXPLAIN_ENRICH_USE_LLM=true and a configured LLM provider"},
            ensure_ascii=False,
        ))
        return

    stats = await asyncio.to_thread(
        enrich_news_for_symbol,
        gateway.storage.market_store,
        ticker,
        start_date=start_date,
        end_date=end_date,
        limit=limit,
        skip_existing=not force,
        only_reanalyze_local=only_local_to_llm,
    )

    story_status = None
    if rebuild_story and story_date:
        # Invalidate the cached story so the fetch below regenerates it.
        await asyncio.to_thread(gateway.storage.market_store.delete_story_cache, ticker, as_of_date=story_date)
        story_payload = await asyncio.to_thread(
            news_domain.get_story_payload,
            gateway.storage.market_store,
            ticker=ticker,
            as_of_date=story_date,
        )
        story_status = {"as_of_date": story_date, "source": story_payload.get("source") or "local"}

    similar_status = None
    if rebuild_similar_days and target_date:
        # Same cache-bust pattern for the similar-days artifact.
        await asyncio.to_thread(gateway.storage.market_store.delete_similar_day_cache, ticker, target_date=target_date)
        similar_payload = await asyncio.to_thread(
            news_domain.get_similar_days_payload,
            gateway.storage.market_store,
            ticker=ticker,
            date=target_date,
            n_similar=8,
        )
        similar_status = {
            "target_date": target_date,
            "count": len(similar_payload.get("items") or []),
            "error": similar_payload.get("error"),
        }

    await websocket.send(json.dumps({
        **base,
        "story_date": story_date or None,
        "target_date": target_date or None,
        "force": force,
        "only_local_to_llm": only_local_to_llm,
        "stats": stats,
        "story_status": story_status,
        "similar_status": similar_status,
    }, ensure_ascii=False, default=str))