Add explain analysis workflow and UI

This commit is contained in:
2026-03-16 22:28:41 +08:00
parent 3a5558b576
commit 1f5ee3698e
49 changed files with 8888 additions and 1476 deletions

View File

@@ -12,7 +12,7 @@ import os
import shutil
import subprocess
import sys
from datetime import datetime
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional
from zoneinfo import ZoneInfo
@@ -21,18 +21,27 @@ import typer
from rich.console import Console
from rich.panel import Panel
from rich.prompt import Confirm
from rich.table import Table
from dotenv import load_dotenv
from backend.agents.prompt_loader import PromptLoader
from backend.agents.workspace_manager import WorkspaceManager
from backend.data.market_ingest import ingest_symbols
from backend.data.market_store import MarketStore
from backend.enrich.llm_enricher import get_explain_model_info, llm_enrichment_enabled
from backend.enrich.news_enricher import enrich_symbols
app = typer.Typer(
name="evotraders",
help="EvoTraders: A self-evolving multi-agent trading system",
add_completion=False,
)
ingest_app = typer.Typer(help="Ingest Polygon market data into the research warehouse.")
app.add_typer(ingest_app, name="ingest")
console = Console()
_prompt_loader = PromptLoader()
load_dotenv()
def get_project_root() -> Path:
@@ -204,6 +213,189 @@ def initialize_workspace(config_name: str) -> Path:
return workspace_manager.get_run_dir(config_name)
def _resolve_symbols(raw_tickers: Optional[str], config_name: Optional[str] = None) -> list[str]:
"""Resolve symbols from explicit input or runtime bootstrap config."""
if raw_tickers and raw_tickers.strip():
return [
item.strip().upper()
for item in raw_tickers.split(",")
if item.strip()
]
workspace_manager = WorkspaceManager(project_root=get_project_root())
bootstrap_path = workspace_manager.get_run_dir(config_name or "default") / "BOOTSTRAP.md"
if bootstrap_path.exists():
content = bootstrap_path.read_text(encoding="utf-8")
for line in content.splitlines():
if line.strip().startswith("tickers:"):
raw = line.split(":", 1)[1]
return [
item.strip().upper()
for item in raw.split(",")
if item.strip()
]
return []
def _filter_problematic_report_rows(rows: list[dict]) -> list[dict]:
"""Keep tickers with incomplete coverage or without any LLM-enriched rows."""
return [
row
for row in rows
if float(row.get("coverage_pct") or 0.0) < 100.0
or int(row.get("llm_count") or 0) == 0
]
def auto_update_market_store(
    config_name: str,
    *,
    end_date: Optional[str] = None,
) -> None:
    """Refresh the long-lived Polygon market store for the active watchlist.

    Best-effort startup hook: a missing API key, an empty watchlist, or an
    ingest failure prints a notice and returns without raising.
    """
    if not os.getenv("POLYGON_API_KEY", "").strip():
        console.print(
            "[dim]Skipping Polygon market store update: POLYGON_API_KEY not set[/dim]",
        )
        return
    symbols = _resolve_symbols(None, config_name)
    if not symbols:
        console.print(
            f"[dim]Skipping Polygon market store update: no tickers found for config '{config_name}'[/dim]",
        )
        return
    target_end = end_date or datetime.now().date().isoformat()
    console.print(
        f"[cyan]Updating Polygon market store for {', '.join(symbols)} -> {target_end}[/cyan]",
    )
    try:
        summaries = ingest_symbols(symbols, mode="incremental", end_date=target_end)
    except Exception as exc:
        console.print(
            f"[yellow]Polygon market store update failed, continuing startup: {exc}[/yellow]",
        )
        return
    for summary in summaries:
        console.print(
            f"[green]{summary['symbol']}[/green] "
            f"prices={summary['prices']} news={summary['news']} aligned={summary['aligned']}"
        )
def auto_prepare_backtest_market_store(
    config_name: str,
    *,
    start_date: str,
    end_date: str,
) -> None:
    """Ensure the market store covers the requested backtest window.

    Best-effort startup hook: a missing API key, an empty watchlist, or a
    preload failure prints a notice and returns without raising.
    """
    if not os.getenv("POLYGON_API_KEY", "").strip():
        console.print(
            "[dim]Skipping Polygon backtest preload: POLYGON_API_KEY not set[/dim]",
        )
        return
    symbols = _resolve_symbols(None, config_name)
    if not symbols:
        console.print(
            f"[dim]Skipping Polygon backtest preload: no tickers found for config '{config_name}'[/dim]",
        )
        return
    console.print(
        f"[cyan]Preparing Polygon market store for backtest {start_date} -> {end_date} "
        f"({', '.join(symbols)})[/cyan]",
    )
    try:
        # Full mode so the whole requested window is present, not just deltas.
        summaries = ingest_symbols(
            symbols,
            mode="full",
            start_date=start_date,
            end_date=end_date,
        )
    except Exception as exc:
        console.print(
            f"[yellow]Polygon backtest preload failed, continuing startup: {exc}[/yellow]",
        )
        return
    for summary in summaries:
        console.print(
            f"[green]{summary['symbol']}[/green] "
            f"prices={summary['prices']} news={summary['news']} aligned={summary['aligned']}"
        )
def auto_enrich_market_store(
    config_name: str,
    *,
    end_date: Optional[str] = None,
    lookback_days: int = 120,
    force: bool = False,
) -> None:
    """Refresh explain-oriented enriched news for the active watchlist.

    The enrichment window ends at ``end_date`` (default: today) and reaches
    back ``lookback_days`` days (minimum one). Failures are reported and
    swallowed so startup can continue.
    """
    symbols = _resolve_symbols(None, config_name)
    if not symbols:
        console.print(
            f"[dim]Skipping explain enrich: no tickers found for config '{config_name}'[/dim]",
        )
        return
    target_end = end_date or datetime.now().date().isoformat()
    try:
        end_dt = datetime.strptime(target_end, "%Y-%m-%d")
    except ValueError:
        console.print(
            f"[yellow]Skipping explain enrich: invalid end date {target_end}[/yellow]",
        )
        return
    window_days = max(1, lookback_days)
    start_date = (end_dt - timedelta(days=window_days)).date().isoformat()
    console.print(
        f"[cyan]Refreshing explain enrich for {', '.join(symbols)} -> {target_end}[/cyan]",
    )
    try:
        summaries = enrich_symbols(
            MarketStore(),
            symbols,
            start_date=start_date,
            end_date=target_end,
            limit=300,
            skip_existing=not force,
        )
    except Exception as exc:
        console.print(
            f"[yellow]Explain enrich failed, continuing startup: {exc}[/yellow]",
        )
        return
    for summary in summaries:
        console.print(
            f"[green]{summary['symbol']}[/green] "
            f"news={summary['news_count']} queued={summary['queued_count']} analyzed={summary['analyzed']} "
            f"skipped={summary['skipped_existing_count']} deduped={summary['deduped_count']} "
            f"llm={summary['llm_count']} local={summary['local_count']}"
        )
@app.command("init-workspace")
def init_workspace(
config_name: str = typer.Option(
@@ -223,6 +415,213 @@ def init_workspace(
)
@ingest_app.command("full")
def ingest_full(
    tickers: Optional[str] = typer.Option(
        None,
        "--tickers",
        "-t",
        help="Comma-separated tickers to ingest",
    ),
    start: Optional[str] = typer.Option(
        None,
        "--start",
        help="Start date for full ingestion (YYYY-MM-DD)",
    ),
    end: Optional[str] = typer.Option(
        None,
        "--end",
        help="End date for ingestion (YYYY-MM-DD)",
    ),
    config_name: str = typer.Option(
        "default",
        "--config-name",
        "-c",
        help="Fallback config to read tickers from BOOTSTRAP.md",
    ),
):
    """Run full Polygon ingestion for the specified symbols."""
    symbols = _resolve_symbols(tickers, config_name)
    if not symbols:
        console.print("[red]No tickers provided and none found in BOOTSTRAP.md[/red]")
        raise typer.Exit(1)
    console.print(f"[cyan]Starting full Polygon ingest for {', '.join(symbols)}[/cyan]")
    # One summary row per ticker: row/alignment counts for quick inspection.
    for summary in ingest_symbols(symbols, mode="full", start_date=start, end_date=end):
        console.print(
            f"[green]{summary['symbol']}[/green] prices={summary['prices']} news={summary['news']} aligned={summary['aligned']}"
        )
@ingest_app.command("update")
def ingest_update(
    tickers: Optional[str] = typer.Option(
        None,
        "--tickers",
        "-t",
        help="Comma-separated tickers to update",
    ),
    end: Optional[str] = typer.Option(
        None,
        "--end",
        help="Optional end date override (YYYY-MM-DD)",
    ),
    config_name: str = typer.Option(
        "default",
        "--config-name",
        "-c",
        help="Fallback config to read tickers from BOOTSTRAP.md",
    ),
):
    """Run incremental Polygon ingestion using stored watermarks."""
    symbols = _resolve_symbols(tickers, config_name)
    if not symbols:
        console.print("[red]No tickers provided and none found in BOOTSTRAP.md[/red]")
        raise typer.Exit(1)
    console.print(f"[cyan]Starting incremental Polygon ingest for {', '.join(symbols)}[/cyan]")
    # One summary row per ticker: row/alignment counts for quick inspection.
    for summary in ingest_symbols(symbols, mode="incremental", end_date=end):
        console.print(
            f"[green]{summary['symbol']}[/green] prices={summary['prices']} news={summary['news']} aligned={summary['aligned']}"
        )
@ingest_app.command("enrich")
def ingest_enrich(
    tickers: Optional[str] = typer.Option(
        None,
        "--tickers",
        "-t",
        help="Comma-separated tickers to enrich",
    ),
    start: Optional[str] = typer.Option(
        None,
        "--start",
        help="Optional start date for enrichment window (YYYY-MM-DD)",
    ),
    end: Optional[str] = typer.Option(
        None,
        "--end",
        help="Optional end date for enrichment window (YYYY-MM-DD)",
    ),
    limit: int = typer.Option(
        300,
        "--limit",
        help="Maximum raw news rows per ticker to analyze",
    ),
    force: bool = typer.Option(
        False,
        "--force",
        help="Re-analyze already enriched news instead of only missing rows",
    ),
    config_name: str = typer.Option(
        "default",
        "--config-name",
        "-c",
        help="Fallback config to read tickers from BOOTSTRAP.md",
    ),
):
    """Run explain-oriented news enrichment for symbols already in the market store."""
    symbols = _resolve_symbols(tickers, config_name)
    if not symbols:
        console.print("[red]No tickers provided and none found in BOOTSTRAP.md[/red]")
        raise typer.Exit(1)
    console.print(f"[cyan]Starting explain enrich for {', '.join(symbols)}[/cyan]")
    # Enforce a floor of 10 rows so a tiny --limit cannot starve the run.
    effective_limit = max(10, limit)
    summaries = enrich_symbols(
        MarketStore(),
        symbols,
        start_date=start,
        end_date=end,
        limit=effective_limit,
        skip_existing=not force,
    )
    for summary in summaries:
        console.print(
            f"[green]{summary['symbol']}[/green] "
            f"news={summary['news_count']} queued={summary['queued_count']} analyzed={summary['analyzed']} "
            f"skipped={summary['skipped_existing_count']} deduped={summary['deduped_count']} "
            f"llm={summary['llm_count']} local={summary['local_count']}"
        )
@ingest_app.command("report")
def ingest_report(
    tickers: Optional[str] = typer.Option(
        None,
        "--tickers",
        "-t",
        help="Optional comma-separated tickers to report",
    ),
    start: Optional[str] = typer.Option(
        None,
        "--start",
        help="Optional start date for report window (YYYY-MM-DD)",
    ),
    end: Optional[str] = typer.Option(
        None,
        "--end",
        help="Optional end date for report window (YYYY-MM-DD)",
    ),
    config_name: str = typer.Option(
        "default",
        "--config-name",
        "-c",
        help="Fallback config to read tickers from BOOTSTRAP.md",
    ),
    only_problematic: bool = typer.Option(
        False,
        "--only-problematic",
        help="Only show tickers with incomplete coverage or no LLM-enriched news",
    ),
):
    """Show explain enrichment coverage and freshness per ticker."""
    symbols = _resolve_symbols(tickers, config_name)
    store = MarketStore()
    report_rows = store.get_enrich_report(
        symbols=symbols or None,
        start_date=start,
        end_date=end,
    )
    if only_problematic:
        report_rows = _filter_problematic_report_rows(report_rows)
    if not report_rows:
        if only_problematic:
            console.print("[green]No problematic enrich report rows found for the requested scope[/green]")
        else:
            console.print("[yellow]No enrich report rows found for the requested scope[/yellow]")
        raise typer.Exit(0)
    model_info = get_explain_model_info()
    model_label = model_info["label"] if llm_enrichment_enabled() else "disabled"
    table = Table(title="Explain Enrichment Report")
    table.add_column("Ticker", style="cyan")
    table.add_column("Raw News", justify="right")
    table.add_column("Analyzed", justify="right")
    table.add_column("Coverage", justify="right")
    table.add_column("LLM", justify="right")
    table.add_column("Local", justify="right")
    table.add_column("Latest Trade Date")
    table.add_column("Latest Analysis")
    table.caption = f"Explain LLM: {model_label}"
    for row in report_rows:
        # Bug fix: tolerate NULL/missing metrics from the store (e.g. tickers
        # with no analyzed rows yet) instead of crashing on the float format
        # spec — mirrors the guards in _filter_problematic_report_rows.
        coverage = float(row.get("coverage_pct") or 0.0)
        table.add_row(
            row["symbol"],
            str(row.get("raw_news_count") or 0),
            str(row.get("analyzed_news_count") or 0),
            f"{coverage:.1f}%",
            str(row.get("llm_count") or 0),
            str(row.get("local_count") or 0),
            str(row.get("latest_trade_date") or "-"),
            str(row.get("latest_analysis_at") or "-"),
        )
    console.print(table)
@app.command()
def backtest(
start: Optional[str] = typer.Option(
@@ -331,6 +730,16 @@ def backtest(
# Run data updater
run_data_updater(project_root)
auto_prepare_backtest_market_store(
config_name,
start_date=start,
end_date=end,
)
auto_enrich_market_store(
config_name,
end_date=end,
force=False,
)
# Build command using backend.main
cmd = [
@@ -514,6 +923,15 @@ def live(
# Data update (if not mock mode)
if not mock:
run_data_updater(project_root)
auto_update_market_store(
config_name,
end_date=nyse_now.date().isoformat(),
)
auto_enrich_market_store(
config_name,
end_date=nyse_now.date().isoformat(),
force=False,
)
else:
console.print(
"\n[dim]Mock mode enabled - skipping data update[/dim]\n",

View File

@@ -47,6 +47,10 @@ class StateSync:
"""Set current simulation date for backtest-compatible timestamps"""
self._simulation_date = date
def clear_simulation_date(self):
    """Disable backtest timestamp simulation and use wall-clock time.

    Counterpart of set_simulation_date(); a None value signals live mode
    to the timestamp helpers.
    """
    self._simulation_date = None
def _get_timestamp_ms(self) -> int:
"""
Get timestamp in milliseconds.
@@ -97,12 +101,24 @@ class StateSync:
if not self._enabled:
return
# Ensure timestamp exists (use simulation date if in backtest mode)
# Ensure timestamp exists. Prefer explicit millisecond timestamps so
# frontend displays local wall time correctly instead of date-only UTC.
if "timestamp" not in event:
ts_ms = event.get("ts")
if ts_ms is not None:
try:
event["timestamp"] = datetime.fromtimestamp(
float(ts_ms) / 1000.0,
).isoformat()
except (TypeError, ValueError, OSError):
if self._simulation_date:
event["timestamp"] = f"{self._simulation_date}"
else:
event["timestamp"] = datetime.now().isoformat()
elif self._simulation_date:
event["timestamp"] = f"{self._simulation_date}"
else:
event["timestamp"] = datetime.now().isoformat()
# Persist to feed_history
if persist:
@@ -238,9 +254,12 @@ class StateSync:
"""Called at start of trading cycle"""
self._state["current_date"] = date
self._state["status"] = "running"
if self._state.get("server_mode") == "backtest":
self.set_simulation_date(
date,
) # Set for backtest-compatible timestamps
else:
self.clear_simulation_date()
await self.emit(
{

View File

@@ -7,6 +7,7 @@ from datetime import datetime
from typing import Callable, Dict, List, Optional
import pandas as pd
from backend.data.market_store import MarketStore
from backend.data.provider_utils import normalize_symbol
from backend.data.provider_router import get_provider_router
@@ -26,6 +27,7 @@ class HistoricalPriceManager:
self.close_prices = {}
self.running = False
self._router = get_provider_router()
self._market_store = MarketStore()
def subscribe(
self,
@@ -58,21 +60,48 @@ class HistoricalPriceManager:
logger.warning(f"Failed to load CSV for {symbol}: {e}")
return None
def _load_from_market_db(
    self,
    symbol: str,
    start_date: str,
    end_date: str,
) -> Optional[pd.DataFrame]:
    """Load OHLC rows for *symbol* from the research DB as a date-indexed frame.

    Any failure or lack of usable rows yields None (with a warning on
    error), letting the caller fall through to the CSV loader.
    """
    try:
        records = self._market_store.get_ohlc(symbol, start_date, end_date)
        if not records:
            return None
        frame = pd.DataFrame(records)
        if frame.empty or "date" not in frame.columns:
            return None
        frame["Date"] = pd.to_datetime(frame["date"])
        return frame.set_index("Date").sort_index()
    except Exception as e:
        logger.warning(f"Failed to load market DB data for {symbol}: {e}")
        return None
def preload_data(self, start_date: str, end_date: str):
    """Warm the in-memory price cache: market DB first, CSV as fallback."""
    logger.info(f"Preloading data: {start_date} to {end_date}")
    for symbol in self.subscribed_symbols:
        if symbol in self._price_cache:
            # Already warmed from an earlier call.
            continue
        db_frame = self._load_from_market_db(symbol, start_date, end_date)
        if db_frame is not None and not db_frame.empty:
            self._price_cache[symbol] = db_frame
            logger.info(f"Loaded {symbol} from market DB: {len(db_frame)} records")
            continue
        csv_frame = self._load_from_csv(symbol)
        if csv_frame is None or csv_frame.empty:
            logger.warning(f"No market DB or CSV data for {symbol}")
            continue
        self._price_cache[symbol] = csv_frame
        logger.info(f"Loaded {symbol} from CSV: {len(csv_frame)} records")
def set_date(self, date: str):
"""Set current trading date and update prices"""

View File

@@ -0,0 +1,149 @@
# -*- coding: utf-8 -*-
"""Ingest Polygon market data into the long-lived research warehouse."""
from __future__ import annotations
from datetime import datetime, timedelta, timezone
from typing import Iterable
from backend.data.market_store import MarketStore
from backend.data.news_alignment import align_news_for_symbol
from backend.data.polygon_client import (
fetch_news,
fetch_ohlc,
fetch_ticker_details,
)
from backend.data.provider_utils import normalize_symbol
def _today_utc() -> str:
return datetime.now(timezone.utc).date().isoformat()
def _default_start(years: int = 2) -> str:
return (datetime.now(timezone.utc).date() - timedelta(days=years * 366)).isoformat()
def ingest_ticker_history(
    symbol: str,
    *,
    start_date: str | None = None,
    end_date: str | None = None,
    store: MarketStore | None = None,
) -> dict:
    """Fetch and persist Polygon OHLC + news for a ticker.

    Args:
        symbol: Ticker symbol; normalized before use.
        start_date: ISO start date; defaults to the standard ~2-year lookback.
        end_date: ISO end date; defaults to today (UTC).
        store: Optional MarketStore to reuse; a fresh one is created otherwise.

    Returns:
        Summary dict with the window used and per-table upsert counts.
    """
    ticker = normalize_symbol(symbol)
    start = start_date or _default_start()
    end = end_date or _today_utc()
    market_store = store or MarketStore()
    # Refresh company metadata first so price/news rows attach to a known ticker.
    details = fetch_ticker_details(ticker)
    market_store.upsert_ticker(
        symbol=ticker,
        name=details.get("name"),
        sector=details.get("sic_description"),
        is_active=bool(details.get("active", True)),
    )
    ohlc_rows = fetch_ohlc(ticker, start, end)
    news_rows = fetch_news(ticker, start, end)
    price_count = market_store.upsert_ohlc(ticker, ohlc_rows, source="polygon")
    news_count = market_store.upsert_news(ticker, news_rows, source="polygon")
    # Map freshly stored news onto NYSE trade dates.
    aligned_count = align_news_for_symbol(market_store, ticker)
    # Advance watermarks to the end of the fetched window unconditionally so
    # incremental updates resume after `end` instead of re-fetching this range.
    market_store.update_fetch_watermark(symbol=ticker, price_date=end, news_date=end)
    return {
        "symbol": ticker,
        "start_date": start,
        "end_date": end,
        "prices": price_count,
        "news": news_count,
        "aligned": aligned_count,
    }
def update_ticker_incremental(
    symbol: str,
    *,
    end_date: str | None = None,
    store: MarketStore | None = None,
) -> dict:
    """Incrementally fetch OHLC + news since the last watermark.

    Each feed (prices, news) resumes the day after its stored watermark, or
    falls back to the default multi-year lookback when no watermark exists.

    Args:
        symbol: Ticker symbol; normalized before use.
        end_date: ISO end date; defaults to today (UTC).
        store: Optional MarketStore to reuse; a fresh one is created otherwise.

    Returns:
        Summary dict with the per-feed windows used and upsert counts.
    """
    ticker = normalize_symbol(symbol)
    market_store = store or MarketStore()
    watermarks = market_store.get_ticker_watermarks(ticker)
    end = end_date or _today_utc()
    # Resume the day after the last successful fetch; a missing watermark
    # means this ticker was never ingested, so use the full default lookback.
    start_prices = (
        (datetime.fromisoformat(watermarks["last_price_fetch"]) + timedelta(days=1)).date().isoformat()
        if watermarks.get("last_price_fetch")
        else _default_start()
    )
    start_news = (
        (datetime.fromisoformat(watermarks["last_news_fetch"]) + timedelta(days=1)).date().isoformat()
        if watermarks.get("last_news_fetch")
        else _default_start()
    )
    details = fetch_ticker_details(ticker)
    market_store.upsert_ticker(
        symbol=ticker,
        name=details.get("name"),
        sector=details.get("sic_description"),
        is_active=bool(details.get("active", True)),
    )
    # A start past `end` means the watermark is already current: skip the fetch.
    ohlc_rows = [] if start_prices > end else fetch_ohlc(ticker, start_prices, end)
    news_rows = [] if start_news > end else fetch_news(ticker, start_news, end)
    price_count = market_store.upsert_ohlc(ticker, ohlc_rows, source="polygon") if ohlc_rows else 0
    news_count = market_store.upsert_news(ticker, news_rows, source="polygon") if news_rows else 0
    aligned_count = align_news_for_symbol(market_store, ticker)
    # Only advance a watermark when rows arrived or one already existed;
    # passing None leaves it unset so a future run still does a full backfill.
    market_store.update_fetch_watermark(
        symbol=ticker,
        price_date=end if ohlc_rows or watermarks.get("last_price_fetch") else None,
        news_date=end if news_rows or watermarks.get("last_news_fetch") else None,
    )
    return {
        "symbol": ticker,
        "start_price_date": start_prices,
        "start_news_date": start_news,
        "end_date": end,
        "prices": price_count,
        "news": news_count,
        "aligned": aligned_count,
    }
def ingest_symbols(
    symbols: Iterable[str],
    *,
    mode: str = "incremental",
    start_date: str | None = None,
    end_date: str | None = None,
    store: MarketStore | None = None,
) -> list[dict]:
    """Fetch Polygon data for a list of tickers.

    ``mode="full"`` replays the whole window per ticker; any other mode
    resumes from stored watermarks. Symbols that normalize to an empty
    string are skipped. Returns one summary dict per ingested ticker.
    """
    market_store = store or MarketStore()
    summaries: list[dict] = []
    for raw_symbol in symbols:
        ticker = normalize_symbol(raw_symbol)
        if not ticker:
            continue
        if mode == "full":
            summary = ingest_ticker_history(
                ticker,
                start_date=start_date,
                end_date=end_date,
                store=market_store,
            )
        else:
            summary = update_ticker_incremental(
                ticker,
                end_date=end_date,
                store=market_store,
            )
        summaries.append(summary)
    return summaries

1074
backend/data/market_store.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
"""Align persisted news to the nearest NYSE trading date."""
from __future__ import annotations
from datetime import time
import pandas as pd
import pandas_market_calendars as mcal
from backend.data.market_store import MarketStore
NYSE_CALENDAR = mcal.get_calendar("NYSE")
def _next_trading_day(date_str: str) -> str:
    """Return the first NYSE session on or after *date_str* (ISO date).

    Falls back to *date_str* itself when no session exists in the ten-day
    search window (should not happen for realistic calendars).
    """
    anchor = pd.Timestamp(date_str).tz_localize(None)
    sessions = NYSE_CALENDAR.valid_days(
        start_date=(anchor - pd.Timedelta(days=1)).strftime("%Y-%m-%d"),
        end_date=(anchor + pd.Timedelta(days=10)).strftime("%Y-%m-%d"),
    )
    for session in sessions:
        session_ts = pd.Timestamp(session).tz_localize(None)
        if session_ts >= anchor:
            return session_ts.strftime("%Y-%m-%d")
    return date_str
def resolve_trade_date(published_utc: str | None) -> str | None:
    """Map a published timestamp to an NYSE trade date.

    News published on a non-session day, or after the 16:00 ET close, is
    attributed to the next trading day. Missing or unparsable timestamps
    yield None.
    """
    if not published_utc:
        return None
    parsed = pd.to_datetime(published_utc, utc=True, errors="coerce")
    if pd.isna(parsed):
        return None
    eastern = parsed.tz_convert("America/New_York")
    candidate = eastern.date().isoformat()
    if len(NYSE_CALENDAR.valid_days(start_date=candidate, end_date=candidate)) == 0:
        # Weekend or holiday: roll forward to the next session.
        return _next_trading_day(candidate)
    if eastern.time() >= time(16, 0):
        # After the close: the news can only affect the next session.
        return _next_trading_day((eastern + pd.Timedelta(days=1)).date().isoformat())
    return candidate
def align_news_for_symbol(store: MarketStore, symbol: str, *, limit: int = 5000) -> int:
    """Fill missing trade_date values for one ticker.

    Resolves a trade date for up to *limit* pending news rows and writes
    them back in one batch. Returns the number of rows updated.
    """
    updates = []
    for row in store.get_news_without_trade_date(symbol, limit=limit):
        trade_date = resolve_trade_date(row.get("published_utc"))
        if not trade_date:
            continue
        updates.append(
            {
                "news_id": row["news_id"],
                "symbol": row["symbol"],
                "trade_date": trade_date,
            }
        )
    return store.set_trade_dates(updates) if updates else 0

View File

@@ -0,0 +1,161 @@
# -*- coding: utf-8 -*-
"""Polygon client used for long-lived market research ingestion."""
from __future__ import annotations
import os
import time
from datetime import datetime, timezone
from typing import Any, Optional
import requests
BASE = "https://api.polygon.io"
def _headers() -> dict[str, str]:
api_key = os.getenv("POLYGON_API_KEY", "").strip()
if not api_key:
raise ValueError("Missing required API key: POLYGON_API_KEY")
return {"Authorization": f"Bearer {api_key}"}
def http_get(
    url: str,
    params: Optional[dict[str, Any]] = None,
    *,
    max_retries: int = 8,
    backoff: float = 2.0,
) -> requests.Response:
    """HTTP GET with exponential backoff, retrying network errors, 429 and 5xx.

    Args:
        url: Absolute URL to fetch.
        params: Optional query parameters (empty dict when None).
        max_retries: Total number of attempts before giving up.
        backoff: Base of the exponential delay (``backoff ** attempt``).

    Returns:
        The successful response (raise_for_status has already passed).

    Raises:
        requests.RequestException: network failure on the final attempt.
        requests.HTTPError: retryable status on the final attempt, or any
            other non-success status immediately.
        ValueError: from _headers() when POLYGON_API_KEY is missing.
    """
    for attempt in range(max_retries):
        last_attempt = attempt == max_retries - 1
        try:
            response = requests.get(
                url,
                params=params or {},
                headers=_headers(),
                timeout=30,
            )
        except requests.RequestException:
            # Bug fix: re-raise immediately on the final attempt instead of
            # sleeping up to ~2**7 s first, and cap the delay at 60 s to
            # match the other retry paths.
            if last_attempt:
                raise
            time.sleep(min((backoff**attempt) + 0.5, 60.0))
            continue
        if response.status_code == 429:
            if last_attempt:
                response.raise_for_status()
            retry_after = response.headers.get("Retry-After")
            try:
                # Bug fix: accept fractional Retry-After values, not just
                # integers (isdigit rejected e.g. "1.5"); HTTP-date forms
                # fall through to the exponential default.
                wait = float(retry_after) if retry_after else None
            except ValueError:
                wait = None
            if wait is None or wait < 0:
                wait = min((backoff**attempt) + 1.0, 60.0)
            time.sleep(wait)
            continue
        if 500 <= response.status_code < 600:
            if last_attempt:
                response.raise_for_status()
            time.sleep(min((backoff**attempt) + 1.0, 60.0))
            continue
        # Non-retryable statuses (4xx other than 429) raise here; success returns.
        response.raise_for_status()
        return response
    raise RuntimeError("Unreachable")
def fetch_ticker_details(symbol: str) -> dict[str, Any]:
    """Fetch company metadata from Polygon; empty dict when none is returned."""
    payload = http_get(f"{BASE}/v3/reference/tickers/{symbol}").json()
    return payload.get("results", {}) or {}
def fetch_ohlc(symbol: str, start_date: str, end_date: str) -> list[dict[str, Any]]:
    """Fetch daily adjusted OHLC bars from Polygon as plain dict rows."""
    response = http_get(
        f"{BASE}/v2/aggs/ticker/{symbol}/range/1/day/{start_date}/{end_date}",
        params={"adjusted": "true", "sort": "asc", "limit": 50000},
    )
    bars = response.json().get("results") or []
    return [
        {
            # Polygon aggregate timestamps ("t") are epoch milliseconds.
            "date": datetime.fromtimestamp(
                int(bar["t"]) / 1000,
                tz=timezone.utc,
            ).date().isoformat(),
            "open": bar.get("o"),
            "high": bar.get("h"),
            "low": bar.get("l"),
            "close": bar.get("c"),
            "volume": bar.get("v"),
            "vwap": bar.get("vw"),
            "transactions": bar.get("n"),
        }
        for bar in bars
    ]
def fetch_news(
    symbol: str,
    start_date: str,
    end_date: str,
    *,
    per_page: int = 50,
    page_sleep: float = 1.2,
    max_pages: Optional[int] = None,
) -> list[dict[str, Any]]:
    """Fetch all Polygon news for a ticker, with pagination.

    Args:
        symbol: Ticker whose news to fetch.
        start_date: Inclusive lower bound on published_utc (ISO date).
        end_date: Inclusive upper bound on published_utc (ISO date).
        per_page: Page size requested from the API.
        page_sleep: Seconds slept between pages as basic rate limiting.
        max_pages: Optional hard cap on the number of pages fetched.

    Returns:
        Article dicts in ascending published order, de-duplicated by id.
    """
    url = f"{BASE}/v2/reference/news"
    params = {
        "ticker": symbol,
        "published_utc.gte": start_date,
        "published_utc.lte": end_date,
        "limit": per_page,
        "order": "asc",
    }
    next_url: Optional[str] = None
    pages = 0
    all_articles: list[dict[str, Any]] = []
    seen_ids: set[str] = set()
    while True:
        # Polygon's next_url already encodes the query, so explicit params
        # are only sent on the first request.
        response = http_get(next_url or url, params=None if next_url else params)
        data = response.json()
        results = data.get("results") or []
        if not results:
            break
        for item in results:
            article_id = item.get("id")
            # Skip duplicates that can straddle page boundaries.
            if article_id and article_id in seen_ids:
                continue
            all_articles.append(
                {
                    "id": article_id,
                    "publisher": (item.get("publisher") or {}).get("name"),
                    "title": item.get("title"),
                    "author": item.get("author"),
                    "published_utc": item.get("published_utc"),
                    "amp_url": item.get("amp_url"),
                    "article_url": item.get("article_url"),
                    "tickers": item.get("tickers"),
                    "description": item.get("description"),
                    "insights": item.get("insights"),
                }
            )
            if article_id:
                seen_ids.add(article_id)
        next_url = data.get("next_url")
        pages += 1
        # Honor the page cap before following next_url.
        if max_pages is not None and pages >= max_pages:
            break
        if not next_url:
            break
        time.sleep(page_sleep)
    return all_articles

View File

@@ -0,0 +1,2 @@
"""News enrichment utilities for explain-oriented market research."""

View File

@@ -0,0 +1,296 @@
# -*- coding: utf-8 -*-
"""Optional AgentScope-backed news enrichment with safe local fallback."""
from __future__ import annotations
import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import Any
from pydantic import BaseModel, Field
from backend.config.env_config import canonicalize_model_provider, get_env_bool, get_env_str
from backend.llm.models import create_model
class EnrichedNewsItem(BaseModel):
    """Structured output schema for one enriched article.

    The Field descriptions are written as instructions for the structured-
    output model call, so keep them short and prescriptive; changing them
    changes the prompt.
    """

    id: str = Field(description="The source article id")
    relevance: str = Field(description="One of high, medium, low")
    sentiment: str = Field(description="One of positive, negative, neutral")
    key_discussion: str = Field(description="Concise core discussion")
    summary: str = Field(description="Concise factual summary")
    reason_growth: str = Field(description="Growth-oriented reason if present")
    reason_decrease: str = Field(description="Downside-oriented reason if present")
class EnrichedNewsBatch(BaseModel):
    """Structured output schema for a batch of enriched articles.

    Wraps the list so the model returns a single JSON object with an
    "items" array rather than a bare top-level list.
    """

    # One entry per analyzed article; ids echo the request ids.
    items: list[EnrichedNewsItem]
class RangeAnalysisPayload(BaseModel):
    """Structured output schema for range explanation text.

    All text fields are requested in Chinese via the Field descriptions,
    which steer the structured-output model; do not reword them casually.
    """

    summary: str = Field(description="Concise Chinese range summary for the selected window")
    trend_analysis: str = Field(description="Concise Chinese trend explanation for the selected window")
    bullish_factors: list[str] = Field(description="Top bullish factors in Chinese")
    bearish_factors: list[str] = Field(description="Top bearish factors in Chinese")
def get_explain_model_info() -> dict[str, str]:
    """Resolve provider/model used by explain enrichment.

    Dedicated EXPLAIN_ENRICH_* variables take precedence; otherwise the
    global MODEL_PROVIDER / MODEL_NAME settings (with OpenAI defaults) apply.
    Returns a dict with "provider", "model_name" and a combined "label".
    """
    raw_provider = get_env_str("EXPLAIN_ENRICH_MODEL_PROVIDER") or get_env_str(
        "MODEL_PROVIDER",
        "OPENAI",
    )
    provider = canonicalize_model_provider(raw_provider)
    model_name = get_env_str("EXPLAIN_ENRICH_MODEL_NAME") or get_env_str(
        "MODEL_NAME",
        "gpt-4o-mini",
    )
    return {
        "provider": provider,
        "model_name": model_name,
        "label": f"{provider}:{model_name}",
    }
def _normalize_enrichment_payload(payload: Any) -> dict[str, Any] | None:
    """Coerce a model/dict payload into the canonical enrichment shape.

    Blank or missing fields collapse to None; the (possibly dumped) source
    payload is kept under "raw_json". Non-dict inputs yield None.
    """
    if isinstance(payload, BaseModel):
        payload = payload.model_dump()
    if not isinstance(payload, dict):
        return None

    def _clean(key: str, *, lower: bool = False) -> str | None:
        value = str(payload.get(key) or "").strip()
        if lower:
            value = value.lower()
        return value or None

    return {
        "relevance": _clean("relevance", lower=True),
        "sentiment": _clean("sentiment", lower=True),
        "key_discussion": _clean("key_discussion"),
        "summary": _clean("summary"),
        "reason_growth": _clean("reason_growth"),
        "reason_decrease": _clean("reason_decrease"),
        "raw_json": payload,
    }
def _run_async(coro: Any) -> Any:
"""Run an async AgentScope model call from sync code, even inside a running loop."""
try:
asyncio.get_running_loop()
except RuntimeError:
return asyncio.run(coro)
with ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(asyncio.run, coro)
return future.result()
def _get_explain_model():
    """Build the AgentScope model instance used for explain enrichment."""
    info = get_explain_model_info()
    return create_model(
        model_name=info["model_name"],
        provider=info["provider"],
        stream=False,
        # Low temperature: enrichment output should be stable and factual.
        generate_kwargs={"temperature": 0.1},
    )
def llm_enrichment_enabled() -> bool:
    """Return whether AgentScope-backed LLM enrichment should be attempted.

    Requires EXPLAIN_ENRICH_USE_LLM to be truthy and an API key for the
    resolved provider; Ollama is the only provider allowed without a key.
    """
    if not get_env_bool("EXPLAIN_ENRICH_USE_LLM", False):
        return False
    provider = get_explain_model_info()["provider"]
    key_env_by_provider = {
        "OPENAI": "OPENAI_API_KEY",
        "ANTHROPIC": "ANTHROPIC_API_KEY",
        "DASHSCOPE": "DASHSCOPE_API_KEY",
        "ALIBABA": "DASHSCOPE_API_KEY",
        "GEMINI": "GOOGLE_API_KEY",
        "GOOGLE": "GOOGLE_API_KEY",
        "DEEPSEEK": "DEEPSEEK_API_KEY",
        "GROQ": "GROQ_API_KEY",
        "OPENROUTER": "OPENROUTER_API_KEY",
    }
    env_key = key_env_by_provider.get(provider)
    if env_key is None:
        # Unknown providers are rejected unless they are keyless local ones.
        return provider == "OLLAMA"
    return bool(get_env_str(env_key))
def llm_range_analysis_enabled() -> bool:
    """Return whether LLM range analysis should be attempted.

    When EXPLAIN_RANGE_USE_LLM is set (non-blank) it gates range analysis
    explicitly (still requiring enrichment to be enabled); otherwise range
    analysis simply follows the enrichment toggle.
    """
    raw_value = get_env_str("EXPLAIN_RANGE_USE_LLM")
    explicitly_set = raw_value is not None and str(raw_value).strip() != ""
    if not explicitly_set:
        return llm_enrichment_enabled()
    return get_env_bool("EXPLAIN_RANGE_USE_LLM", False) and llm_enrichment_enabled()
def analyze_news_row_with_llm(row: dict[str, Any]) -> dict[str, Any] | None:
    """Generate explain-oriented structured analysis for one article.

    Args:
        row: Raw news row; only "id", "title" and "summary" are used.

    Returns:
        Normalized enrichment dict with model metadata merged into
        "raw_json", or None when enrichment is disabled or the call/parse
        fails (callers treat this as best-effort).
    """
    if not llm_enrichment_enabled():
        return None
    model = _get_explain_model()
    title = str(row.get("title") or "").strip()
    summary = str(row.get("summary") or "").strip()
    messages = [
        {
            "role": "system",
            "content": (
                "You produce concise structured financial news analysis. "
                "Use only the requested fields and keep content factual."
            ),
        },
        {
            "role": "user",
            "content": (
                "Analyze this stock-news article for an explain UI.\n"
                "Rules:\n"
                "- relevance must be one of: high, medium, low\n"
                "- sentiment must be one of: positive, negative, neutral\n"
                "- keep each text field concise and factual\n"
                f"- article id: {str(row.get('id') or '').strip()}\n"
                f"Title: {title}\n"
                f"Summary: {summary}\n"
            ),
        },
    ]
    try:
        response = _run_async(model(messages=messages, structured_model=EnrichedNewsItem))
    except Exception:
        # Best-effort: any model/transport error falls back to local analysis.
        return None
    payload = _normalize_enrichment_payload(getattr(response, "metadata", None))
    if payload:
        # Record which model produced the analysis; resolve the env-backed
        # model info once instead of three separate lookups.
        model_info = get_explain_model_info()
        payload.setdefault("raw_json", {})
        payload["raw_json"].update(
            {
                "model_provider": model_info["provider"],
                "model_name": model_info["model_name"],
                "model_label": model_info["label"],
            }
        )
    return payload
def analyze_news_rows_with_llm(rows: list[dict[str, Any]]) -> dict[str, dict[str, Any]]:
    """Generate structured analysis for multiple articles in one request.

    Args:
        rows: Raw news rows; rows without an ``id`` are skipped.

    Returns:
        Mapping of news id -> normalized enrichment payload tagged with the
        producing model's identity; empty when enrichment is disabled, the
        model call fails, or the response shape is unusable.
    """
    if not llm_enrichment_enabled() or not rows:
        return {}
    payload_rows = [
        {
            "id": str(row.get("id") or "").strip(),
            "title": str(row.get("title") or "").strip(),
            "summary": str(row.get("summary") or "").strip(),
        }
        for row in rows
        if str(row.get("id") or "").strip()
    ]
    if not payload_rows:
        return {}
    model = _get_explain_model()
    messages = [
        {
            "role": "system",
            "content": (
                "You produce concise structured financial news analysis in JSON. "
                "Preserve ids exactly and do not invent extra items."
            ),
        },
        {
            "role": "user",
            "content": (
                "Analyze these stock-news articles for an explain UI.\n"
                "For each item return: id, relevance, sentiment, key_discussion, summary, "
                "reason_growth, reason_decrease.\n"
                "Rules:\n"
                "- relevance must be one of: high, medium, low\n"
                "- sentiment must be one of: positive, negative, neutral\n"
                "- keep all text concise and factual\n"
                f"Articles: {payload_rows}"
            ),
        },
    ]
    try:
        response = _run_async(
            model(messages=messages, structured_model=EnrichedNewsBatch),
        )
    except Exception:
        # Best-effort: callers fall back to per-row or local analysis.
        return {}
    metadata = getattr(response, "metadata", None)
    if isinstance(metadata, BaseModel):
        metadata = metadata.model_dump()
    items = metadata.get("items") if isinstance(metadata, dict) else None
    if not isinstance(items, list):
        return {}
    # Query the model identity once instead of three calls per item.
    model_info = get_explain_model_info()
    results: dict[str, dict[str, Any]] = {}
    for item in items:
        # The model may return pydantic objects, dicts, or malformed entries;
        # anything without a usable id is dropped.
        raw_item = item.model_dump() if isinstance(item, BaseModel) else item
        if not isinstance(raw_item, dict):
            continue
        news_id = str(raw_item.get("id") or "").strip()
        if not news_id:
            continue
        normalized = _normalize_enrichment_payload(item)
        if not normalized:
            continue
        normalized.setdefault("raw_json", {})
        normalized["raw_json"]["model_provider"] = model_info["provider"]
        normalized["raw_json"]["model_name"] = model_info["model_name"]
        normalized["raw_json"]["model_label"] = model_info["label"]
        results[news_id] = normalized
    return results
def analyze_range_with_llm(payload: dict[str, Any]) -> dict[str, Any] | None:
    """Generate explain-oriented range summary and factor refinement.

    Args:
        payload: Pre-computed range facts (price change, categories, top news)
            serialized into the prompt verbatim.

    Returns:
        Dict with ``summary``/``trend_analysis``/factor lists plus model
        identity fields, or ``None`` when range analysis is disabled or the
        model call / parsing fails.
    """
    if not llm_range_analysis_enabled():
        return None
    model = _get_explain_model()
    messages = [
        {
            "role": "system",
            "content": (
                "You write concise Chinese stock range analysis for an explain UI. "
                "Use only the supplied facts. Keep the tone factual and analyst-like."
            ),
        },
        {
            "role": "user",
            "content": (
                "请基于给定事实生成区间分析。\n"
                "输出字段summary, trend_analysis, bullish_factors, bearish_factors。\n"
                "要求:\n"
                "- 全部使用简体中文\n"
                "- summary 1到2句概括区间走势、新闻密度和主导主题\n"
                "- trend_analysis 1句解释区间内部阶段变化\n"
                "- bullish_factors 和 bearish_factors 各返回最多3条短句\n"
                "- 不要编造未提供的信息\n"
                f"事实数据: {payload}"
            ),
        },
    ]
    try:
        response = _run_async(
            model(messages=messages, structured_model=RangeAnalysisPayload),
        )
    except Exception:
        # Best-effort: callers keep their locally-built narrative.
        return None
    metadata = getattr(response, "metadata", None)
    if isinstance(metadata, BaseModel):
        metadata = metadata.model_dump()
    if not isinstance(metadata, dict):
        return None

    def _clean_factors(values: Any) -> list[str]:
        # Strip, drop empties, cap at three entries (shared by both lists).
        cleaned = [str(item).strip() for item in list(values or [])]
        return [item for item in cleaned if item][:3]

    # Query the model identity once instead of three separate calls.
    model_info = get_explain_model_info()
    return {
        "summary": str(metadata.get("summary") or "").strip() or None,
        "trend_analysis": str(metadata.get("trend_analysis") or "").strip() or None,
        "bullish_factors": _clean_factors(metadata.get("bullish_factors")),
        "bearish_factors": _clean_factors(metadata.get("bearish_factors")),
        "model_provider": model_info["provider"],
        "model_name": model_info["model_name"],
        "model_label": model_info["label"],
    }

View File

@@ -0,0 +1,362 @@
# -*- coding: utf-8 -*-
"""Lightweight news enrichment for explain-oriented market analysis."""
from __future__ import annotations
import hashlib
from typing import Any
from backend.config.env_config import get_env_int
from backend.enrich.llm_enricher import (
analyze_news_row_with_llm,
analyze_news_rows_with_llm,
llm_enrichment_enabled,
)
from backend.data.market_store import MarketStore
# Keyword lists backing the local (non-LLM) heuristics in classify_news_row.
# Matching is case-insensitive substring search over "title summary", so a
# keyword also matches inside longer words.
POSITIVE_KEYWORDS = (
    "beat", "surge", "gain", "growth", "record", "upgrade", "strong",
    "partnership", "approved", "launch", "expands", "profit",
)
# Hits here push sentiment toward "negative".
NEGATIVE_KEYWORDS = (
    "miss", "drop", "fall", "cut", "downgrade", "weak", "warning",
    "delay", "lawsuit", "probe", "tariff", "decline", "layoff",
)
# Any hit here marks an article as high relevance.
HIGH_RELEVANCE_KEYWORDS = (
    "earnings", "guidance", "profit", "revenue", "ceo", "fda", "tariff",
    "regulation", "acquisition", "buyback", "forecast", "launch",
)
def _dedupe_key(row: dict[str, Any]) -> str:
trade_date = str(row.get("trade_date") or row.get("date") or "")[:10]
title = str(row.get("title") or "").strip().lower()
summary = str(row.get("summary") or "").strip().lower()[:160]
raw = f"{trade_date}::{title}::{summary}"
return hashlib.sha1(raw.encode("utf-8")).hexdigest()
def _chunk_rows(rows: list[dict[str, Any]], size: int) -> list[list[dict[str, Any]]]:
chunk_size = max(1, int(size))
return [rows[index:index + chunk_size] for index in range(0, len(rows), chunk_size)]
def classify_news_row(row: dict[str, Any]) -> dict[str, Any]:
    """Return a lightweight explain-oriented analysis for one article.

    Prefers per-row LLM output when available; otherwise falls back to a
    keyword heuristic over the lowercased title + summary.
    """
    llm_payload = analyze_news_row_with_llm(row)
    if isinstance(llm_payload, dict):
        result = dict(llm_payload)
        result.setdefault("summary", str(row.get("summary") or row.get("title") or "")[:280])
        result.setdefault("raw_json", row)
        result["analysis_source"] = "llm"
        return result

    headline = str(row.get("title") or "").strip()
    body = str(row.get("summary") or "").strip()
    haystack = f"{headline} {body}".lower()
    bullish_hits = [word for word in POSITIVE_KEYWORDS if word in haystack]
    bearish_hits = [word for word in NEGATIVE_KEYWORDS if word in haystack]
    topic_hits = [word for word in HIGH_RELEVANCE_KEYWORDS if word in haystack]

    # Sentiment by simple majority of keyword hits; ties are neutral.
    if len(bullish_hits) > len(bearish_hits):
        sentiment = "positive"
    elif len(bearish_hits) > len(bullish_hits):
        sentiment = "negative"
    else:
        sentiment = "neutral"

    if topic_hits:
        relevance = "high"
    elif headline:
        relevance = "medium"
    else:
        relevance = "low"

    digest = body or headline
    if topic_hits:
        key_discussion = f"核心主题集中在 {', '.join(topic_hits[:3])}"
    elif digest:
        key_discussion = digest[:160]
    else:
        key_discussion = ""

    reason_growth = digest[:200] if sentiment == "positive" else ""
    reason_decrease = digest[:200] if sentiment == "negative" else ""
    return {
        "relevance": relevance,
        "sentiment": sentiment,
        "key_discussion": key_discussion,
        "summary": digest[:280],
        "reason_growth": reason_growth,
        "reason_decrease": reason_decrease,
        "analysis_source": "local",
        "raw_json": row,
    }
def attach_forward_returns(
    *,
    news_rows: list[dict[str, Any]],
    ohlc_rows: list[dict[str, Any]],
) -> list[dict[str, Any]]:
    """Attach forward-return labels (ret_t0/t1/t3/t5/t10) to each row.

    Returns over each horizon are measured close-to-close from the article's
    trade date; horizons past the last trading day become ``None``. Rows
    whose trade date has no usable close are passed through unchanged.
    """
    if not ohlc_rows:
        return news_rows
    close_lookup: dict[str, float] = {}
    trading_days: list[str] = []
    for bar in ohlc_rows:
        if bar.get("date") is None:
            continue
        day = str(bar.get("date"))
        trading_days.append(day)
        if bar.get("close") is not None:
            close_lookup[day] = float(bar.get("close"))
    day_position = {day: idx for idx, day in enumerate(trading_days)}
    horizon_offsets = (
        ("ret_t0", 0),
        ("ret_t1", 1),
        ("ret_t3", 3),
        ("ret_t5", 5),
        ("ret_t10", 10),
    )
    labeled: list[dict[str, Any]] = []
    for article in news_rows:
        anchor_day = str(article.get("trade_date") or "")[:10]
        anchor_close = close_lookup.get(anchor_day)
        if not anchor_day or anchor_close in (None, 0):
            labeled.append(article)
            continue
        updated = dict(article)
        anchor_idx = day_position.get(anchor_day)
        if anchor_idx is None:
            labeled.append(updated)
            continue
        for field, offset in horizon_offsets:
            future_idx = anchor_idx + offset
            if future_idx >= len(trading_days):
                updated[field] = None
                continue
            future_close = close_lookup.get(trading_days[future_idx])
            if future_close in (None, 0):
                updated[field] = None
            else:
                updated[field] = (float(future_close) - float(anchor_close)) / float(anchor_close)
        labeled.append(updated)
    return labeled
def build_analysis_rows(
    *,
    symbol: str,
    news_rows: list[dict[str, Any]],
    ohlc_rows: list[dict[str, Any]],
) -> tuple[list[dict[str, Any]], dict[str, int]]:
    """Transform raw news rows into market_store news_analysis payloads plus stats.

    Args:
        symbol: Ticker symbol. NOTE(review): currently unused in the body;
            kept for interface symmetry with the other enrichment helpers.
        news_rows: Raw news rows; rows without an ``id`` are dropped.
        ohlc_rows: Daily bars used to attach forward returns.

    Returns:
        Tuple of (staged analysis rows with forward returns attached, counter
        dict with ``deduped_count`` / ``llm_count`` / ``local_count``).
    """
    # Batch LLM analysis up front so the per-row fallback only runs on misses.
    llm_results: dict[str, dict[str, Any]] = {}
    if llm_enrichment_enabled():
        batch_size = get_env_int("EXPLAIN_ENRICH_BATCH_SIZE", 8)
        for chunk in _chunk_rows(news_rows, batch_size):
            llm_results.update(analyze_news_rows_with_llm(chunk))
    staged_rows: list[dict[str, Any]] = []
    seen_dedupe_keys: set[str] = set()
    deduped_count = 0
    llm_count = 0
    local_count = 0
    for row in news_rows:
        news_id = str(row.get("id") or "").strip()
        if not news_id:
            continue
        # Near-duplicates (same date + title + summary prefix) are analyzed
        # once; later copies are only counted, not staged.
        dedupe_key = _dedupe_key(row)
        if dedupe_key in seen_dedupe_keys:
            deduped_count += 1
            continue
        seen_dedupe_keys.add(dedupe_key)
        batch_result = llm_results.get(news_id)
        if isinstance(batch_result, dict):
            analysis = dict(batch_result)
            analysis.setdefault("summary", str(row.get("summary") or row.get("title") or "")[:280])
            analysis.setdefault("raw_json", row)
            analysis["analysis_source"] = "llm"
            llm_count += 1
        else:
            # classify_news_row may still succeed with its own per-row LLM
            # call, so inspect its reported source before counting.
            analysis = classify_news_row(row)
            if analysis.get("analysis_source") == "llm":
                llm_count += 1
            else:
                local_count += 1
        staged_rows.append(
            {
                "news_id": news_id,
                "trade_date": str(row.get("trade_date") or "")[:10] or None,
                **analysis,
            }
        )
    return (
        attach_forward_returns(news_rows=staged_rows, ohlc_rows=ohlc_rows),
        {
            "deduped_count": deduped_count,
            "llm_count": llm_count,
            "local_count": local_count,
        },
    )
def enrich_news_for_symbol(
    store: MarketStore,
    symbol: str,
    *,
    start_date: str | None = None,
    end_date: str | None = None,
    limit: int = 200,
    analysis_source: str = "local",
    skip_existing: bool = True,
    only_reanalyze_local: bool = False,
) -> dict[str, Any]:
    """Read raw market news, compute explain fields, and persist them.

    Args:
        store: Market warehouse used for reads and the final upsert.
        symbol: Ticker; blank/None returns an empty result immediately.
        start_date: Optional inclusive lower bound for news and OHLC reads.
        end_date: Optional inclusive upper bound.
        limit: Max number of raw news rows to consider.
        analysis_source: Source tag passed through to the upsert.
        skip_existing: Skip rows that already have persisted analysis
            (ignored when ``only_reanalyze_local`` is set).
        only_reanalyze_local: Re-analyze ONLY rows whose existing analysis
            was produced by the local heuristic (upgrade path to LLM).

    Returns:
        Stats dict (counts, upgrade info, execution summary) for the caller/UI.
    """
    normalized_symbol = str(symbol or "").strip().upper()
    if not normalized_symbol:
        return {"symbol": "", "analyzed": 0}
    news_rows = store.get_news_items(
        normalized_symbol,
        start_date=start_date,
        end_date=end_date,
        limit=limit,
    )
    total_news_count = len(news_rows)
    skipped_existing_count = 0
    analyzed_sources: dict[str, str] = {}
    skipped_missing_analysis_count = 0
    skipped_non_local_count = 0
    if news_rows and only_reanalyze_local:
        # Upgrade mode: keep only rows already analyzed with source "local";
        # everything else (never analyzed, or already LLM) is skipped.
        analyzed_sources = store.get_analyzed_news_sources(
            normalized_symbol,
            start_date=start_date,
            end_date=end_date,
        )
        skipped_missing_analysis_count = sum(
            1
            for row in news_rows
            if str(row.get("id") or "").strip() not in analyzed_sources
        )
        skipped_non_local_count = sum(
            1
            for row in news_rows
            if str(row.get("id") or "").strip() in analyzed_sources
            and analyzed_sources.get(str(row.get("id") or "").strip()) != "local"
        )
        # NOTE(review): in this mode "skipped_existing" actually counts every
        # row excluded from re-analysis (missing + non-local), not rows with
        # existing analysis — the name is reused from the other branch.
        skipped_existing_count = sum(
            1
            for row in news_rows
            if str(row.get("id") or "").strip() not in analyzed_sources
            or analyzed_sources.get(str(row.get("id") or "").strip()) != "local"
        )
        news_rows = [
            row for row in news_rows
            if analyzed_sources.get(str(row.get("id") or "").strip()) == "local"
        ]
    elif skip_existing and news_rows:
        # Normal mode: only analyze rows with no persisted analysis yet.
        analyzed_ids = store.get_analyzed_news_ids(
            normalized_symbol,
            start_date=start_date,
            end_date=end_date,
        )
        skipped_existing_count = sum(
            1
            for row in news_rows
            if str(row.get("id") or "").strip() in analyzed_ids
        )
        news_rows = [
            row for row in news_rows
            if str(row.get("id") or "").strip() not in analyzed_ids
        ]
    # Derive the OHLC window from the remaining rows when no explicit range
    # was given. NOTE(review): assumes news_rows is ordered newest-first
    # (last row = oldest) — confirm against store.get_news_items.
    ohlc_start = start_date or (news_rows[-1]["trade_date"] if news_rows and news_rows[-1].get("trade_date") else None)
    ohlc_end = end_date or (news_rows[0]["trade_date"] if news_rows and news_rows[0].get("trade_date") else None)
    ohlc_rows = (
        store.get_ohlc(normalized_symbol, ohlc_start, ohlc_end)
        if ohlc_start and ohlc_end
        else []
    )
    analysis_rows, stats = build_analysis_rows(
        symbol=normalized_symbol,
        news_rows=news_rows,
        ohlc_rows=ohlc_rows,
    )
    analyzed = store.upsert_news_analysis(
        normalized_symbol,
        analysis_rows,
        analysis_source=analysis_source,
    )
    # Dates on which at least one row came back from the LLM (upgrade signal).
    upgraded_dates = sorted(
        {
            str(row.get("trade_date") or "")[:10]
            for row in analysis_rows
            if str(row.get("analysis_source") or "").strip().lower() == "llm"
            and str(row.get("trade_date") or "").strip()
        }
    )
    # Titles of rows that still only have local analysis after this pass.
    # NOTE(review): O(n*m) join of news_rows x analysis_rows — acceptable at
    # the default limit of 200, revisit if limits grow.
    remaining_local_titles = [
        str(row.get("title") or row.get("news_id") or "").strip()
        for row in news_rows
        for analyzed_row in analysis_rows
        if str(analyzed_row.get("news_id") or "").strip() == str(row.get("id") or "").strip()
        and str(analyzed_row.get("analysis_source") or "").strip().lower() == "local"
    ][:5]
    return {
        "symbol": normalized_symbol,
        "analyzed": analyzed,
        "news_count": total_news_count,
        "queued_count": len(news_rows),
        "skipped_existing_count": skipped_existing_count,
        "deduped_count": stats["deduped_count"],
        "llm_count": stats["llm_count"],
        "local_count": stats["local_count"],
        "only_reanalyze_local": only_reanalyze_local,
        "upgraded_local_to_llm_count": (
            stats["llm_count"]
            if only_reanalyze_local
            else 0
        ),
        "execution_summary": {
            "upgraded_dates": upgraded_dates[:5],
            "remaining_local_titles": remaining_local_titles,
            "skipped_missing_analysis_count": skipped_missing_analysis_count,
            "skipped_non_local_count": skipped_non_local_count,
        },
    }
def enrich_symbols(
    store: MarketStore,
    symbols: list[str],
    *,
    start_date: str | None = None,
    end_date: str | None = None,
    limit: int = 200,
    analysis_source: str = "local",
    skip_existing: bool = True,
    only_reanalyze_local: bool = False,
) -> list[dict[str, Any]]:
    """Batch enrich multiple symbols for explain-oriented news analysis.

    Blank entries are skipped; all other options are forwarded unchanged to
    enrich_news_for_symbol, one call per normalized ticker.
    """
    return [
        enrich_news_for_symbol(
            store,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=limit,
            analysis_source=analysis_source,
            skip_existing=skip_existing,
            only_reanalyze_local=only_reanalyze_local,
        )
        for raw_symbol in symbols
        if (ticker := str(raw_symbol or "").strip().upper())
    ]

View File

@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
"""Explain-oriented services for stock narratives and news research."""

View File

@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-
"""Rule-based news categorization for explain UI."""
from __future__ import annotations
from typing import Any, Dict, Iterable
# Category -> keyword list; matching is lowercase substring search over the
# concatenated title/summary/related/category fields of a row.
CATEGORY_KEYWORDS = {
    "market": [
        "market", "stock", "rally", "sell-off", "selloff", "trading",
        "wall street", "s&p", "nasdaq", "dow", "index", "bull", "bear",
        "correction", "volatility",
    ],
    "policy": [
        "regulation", "fed", "federal reserve", "tariff", "sanction",
        "interest rate", "policy", "government", "congress", "sec",
        "trade war", "ban", "legislation", "tax",
    ],
    "earnings": [
        "earnings", "revenue", "profit", "quarter", "eps", "guidance",
        "forecast", "income", "sales", "beat", "miss", "outlook",
        "financial results",
    ],
    "product_tech": [
        "product", "ai", "chip", "cloud", "launch", "patent",
        "technology", "innovation", "release", "platform", "model",
        "software", "hardware", "gpu", "autonomous",
    ],
    "competition": [
        "competitor", "rival", "market share", "overtake", "compete",
        "competition", "vs", "versus", "battle", "challenge",
    ],
    "management": [
        "ceo", "executive", "resign", "layoff", "restructure",
        "management", "leadership", "appoint", "hire", "board",
        "chairman",
    ],
}
def categorize_news_rows(rows: Iterable[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """Bucket news rows into the keyword-defined categories above.

    A row may land in several categories; each bucket records a match count
    and the ids of matching articles (rows with a falsy id are counted but
    not listed). NOTE(review): substring matching means short keywords like
    "vs" or "ai" can match inside unrelated words — confirm acceptable.
    """
    buckets: Dict[str, Dict[str, Any]] = {
        name: {"label": name, "count": 0, "article_ids": []}
        for name in CATEGORY_KEYWORDS
    }
    for row in rows:
        searchable = " ".join(
            str(row.get(field) or "")
            for field in ("title", "summary", "related", "category")
        ).lower()
        row_id = row.get("id")
        for name, keywords in CATEGORY_KEYWORDS.items():
            if not any(keyword in searchable for keyword in keywords):
                continue
            buckets[name]["count"] += 1
            if row_id:
                buckets[name]["article_ids"].append(row_id)
    return buckets

View File

@@ -0,0 +1,214 @@
# -*- coding: utf-8 -*-
"""Local range explanation built from price and persisted news."""
from __future__ import annotations
from typing import Any, Dict
from backend.enrich.llm_enricher import analyze_range_with_llm
from backend.explain.category_engine import categorize_news_rows
from backend.tools.data_tools import get_prices
def _rank_event_score(row: Dict[str, Any]) -> float:
relevance = str(row.get("relevance") or "").strip().lower()
relevance_score = {"high": 3.0, "relevant": 3.0, "medium": 2.0, "low": 1.0}.get(
relevance,
0.5,
)
impact_score = abs(float(row.get("ret_t0") or 0.0)) * 100
return relevance_score + impact_score
def _collect_sentiment_factors(
    news_rows: list[Dict[str, Any]],
    *,
    sentiment: str,
    reason_field: str,
    limit: int,
) -> list[str]:
    """Collect unique factor snippets from rows matching *sentiment*.

    Shared engine for the bullish/bearish summarizers (their bodies were
    previously duplicated). Deduplication happens on the full text before
    the 200-char truncation, matching the original behavior.
    """
    candidates: list[str] = []
    for row in news_rows:
        if str(row.get("sentiment") or "").strip().lower() != sentiment:
            continue
        text = (
            row.get(reason_field)
            or row.get("key_discussion")
            or row.get("summary")
            or row.get("title")
        )
        if text:
            candidates.append(str(text).strip())
    seen: set[str] = set()
    output: list[str] = []
    for factor in candidates:
        if factor in seen:
            continue
        seen.add(factor)
        output.append(factor[:200])
        if len(output) >= limit:
            break
    return output


def summarize_bullish_factors(
    news_rows: list[Dict[str, Any]],
    *,
    limit: int = 5,
) -> list[str]:
    """Return up to *limit* unique bullish reasons from positive-sentiment rows."""
    return _collect_sentiment_factors(
        news_rows, sentiment="positive", reason_field="reason_growth", limit=limit,
    )


def summarize_bearish_factors(
    news_rows: list[Dict[str, Any]],
    *,
    limit: int = 5,
) -> list[str]:
    """Return up to *limit* unique bearish reasons from negative-sentiment rows."""
    return _collect_sentiment_factors(
        news_rows, sentiment="negative", reason_field="reason_decrease", limit=limit,
    )
def build_trend_analysis(prices: list[Any]) -> str:
    """Describe intra-range trend by comparing first- and second-half returns.

    Expects bar objects exposing ``.open`` and ``.close`` attributes; short
    ranges (< 3 bars) get a degenerate one-line description.
    """
    count = len(prices)
    if count < 2:
        return "区间样本较短,暂不具备足够趋势信息。"
    if count < 3:
        start = float(prices[0].open)
        finish = float(prices[-1].close)
        pct = ((finish - start) / start) * 100 if start else 0.0
        return f"短区间内价格变动 {pct:+.2f}%,趋势信息有限。"

    def _leg_return(open_value: float, close_value: float) -> float:
        # Percentage return of one leg; 0 when the open is missing/zero.
        return ((close_value - open_value) / open_value) * 100 if open_value else 0.0

    pivot = count // 2
    first_leg = _leg_return(float(prices[0].open), float(prices[pivot].close))
    second_leg = _leg_return(float(prices[pivot].open), float(prices[-1].close))
    return (
        f"前半段{'上涨' if first_leg >= 0 else '下跌'} {abs(first_leg):.2f}%"
        f"后半段{'上涨' if second_leg >= 0 else '下跌'} {abs(second_leg):.2f}%"
        "说明价格驱动在区间内部出现了阶段性切换。"
    )
def build_range_explanation(
    *,
    ticker: str,
    start_date: str,
    end_date: str,
    news_rows: list[Dict[str, Any]],
) -> Dict[str, Any]:
    """Explain a price range with local price and news heuristics.

    Builds a fully-local narrative (summary, trend, factors, key events) and
    then optionally lets the LLM refine the textual fields.

    Args:
        ticker: Symbol whose OHLC is loaded via get_prices.
        start_date: Inclusive range start (YYYY-MM-DD).
        end_date: Inclusive range end.
        news_rows: Pre-enriched news rows (sentiment/relevance/ret_t0 fields).

    Returns:
        Dict with range stats plus an ``analysis`` sub-dict; contains an
        ``error`` key instead when no OHLC exists for the range.
    """
    prices = get_prices(ticker, start_date, end_date)
    if not prices:
        return {
            "symbol": ticker,
            "start_date": start_date,
            "end_date": end_date,
            "error": "No OHLC data for this range",
        }
    open_price = float(prices[0].open)
    close_price = float(prices[-1].close)
    high_price = max(float(price.high) for price in prices)
    low_price = min(float(price.low) for price in prices)
    total_volume = sum(int(price.volume) for price in prices)
    price_change_pct = (
        ((close_price - open_price) / open_price) * 100 if open_price else 0.0
    )
    categories = categorize_news_rows(news_rows)
    news_count = len(news_rows)
    # Categories with at least one hit, most frequent first.
    dominant_categories = sorted(
        (
            {"category": key, "count": value["count"]}
            for key, value in categories.items()
            if value["count"] > 0
        ),
        key=lambda item: item["count"],
        reverse=True,
    )
    direction = "上涨" if price_change_pct > 0 else "下跌" if price_change_pct < 0 else "横盘"
    category_text = (
        f"主要主题集中在 {', '.join(item['category'] for item in dominant_categories[:3])}"
        if dominant_categories
        else "区间内未识别出明显的主题聚类。"
    )
    summary = (
        f"{ticker}{start_date}{end_date} 区间内{direction} {abs(price_change_pct):.2f}%"
        f"区间覆盖 {len(prices)} 个交易日,关联新闻 {news_count} 条。{category_text}"
    )
    bullish_factors = summarize_bullish_factors(news_rows)
    bearish_factors = summarize_bearish_factors(news_rows)
    trend_analysis = build_trend_analysis(prices)
    llm_source = "local"
    # Facts handed to the (optional) LLM refinement step.
    range_payload = {
        "ticker": ticker,
        "start_date": start_date,
        "end_date": end_date,
        "price_change_pct": round(price_change_pct, 2),
        "trading_days": len(prices),
        "news_count": news_count,
        "dominant_categories": dominant_categories[:5],
        "bullish_factors": bullish_factors[:3],
        "bearish_factors": bearish_factors[:3],
        "trend_analysis": trend_analysis,
        "top_news": [
            {
                "date": row.get("trade_date") or str(row.get("date") or "")[:10],
                "title": row.get("title") or "",
                "summary": row.get("summary") or "",
                "sentiment": row.get("sentiment") or "",
                "relevance": row.get("relevance") or "",
                "ret_t0": row.get("ret_t0"),
            }
            for row in sorted(news_rows, key=_rank_event_score, reverse=True)[:5]
        ],
    }
    llm_analysis = analyze_range_with_llm(range_payload)
    if isinstance(llm_analysis, dict):
        # LLM fields override the local ones when non-empty; otherwise the
        # local text survives. NOTE(review): the source is still labeled
        # "llm" even if every field fell back to local — confirm intended.
        summary = llm_analysis.get("summary") or summary
        trend_analysis = llm_analysis.get("trend_analysis") or trend_analysis
        bullish_factors = llm_analysis.get("bullish_factors") or bullish_factors
        bearish_factors = llm_analysis.get("bearish_factors") or bearish_factors
        llm_source = "llm"
    key_events = [
        {
            "date": row.get("trade_date") or str(row.get("date") or "")[:10],
            "title": row.get("title") or "Untitled news",
            "summary": row.get("summary") or "",
            "category": row.get("category") or "",
            "id": row.get("id"),
            "sentiment": row.get("sentiment"),
            "ret_t0": row.get("ret_t0"),
        }
        for row in sorted(news_rows, key=_rank_event_score, reverse=True)[:8]
    ]
    return {
        "symbol": ticker,
        "start_date": start_date,
        "end_date": end_date,
        "price_change_pct": round(price_change_pct, 2),
        "open_price": open_price,
        "close_price": close_price,
        "high_price": high_price,
        "low_price": low_price,
        "total_volume": total_volume,
        "trading_days": len(prices),
        "news_count": news_count,
        "dominant_categories": dominant_categories[:5],
        "analysis": {
            "summary": summary,
            "key_events": key_events,
            "bullish_factors": bullish_factors,
            "bearish_factors": bearish_factors,
            "trend_analysis": trend_analysis,
            "analysis_source": llm_source,
            "analysis_model_label": llm_analysis.get("model_label") if isinstance(llm_analysis, dict) else None,
        },
    }

View File

@@ -0,0 +1,202 @@
# -*- coding: utf-8 -*-
"""Same-ticker historical similar day search for explain view."""
from __future__ import annotations
from math import sqrt
from typing import Any
from backend.data.market_store import MarketStore
def _safe_float(value: Any, default: float = 0.0) -> float:
try:
parsed = float(value)
except (TypeError, ValueError):
return default
return parsed
def build_daily_feature_rows(
*,
symbol: str,
ohlc_rows: list[dict[str, Any]],
news_rows: list[dict[str, Any]],
) -> list[dict[str, Any]]:
"""Aggregate price/news context into daily feature rows."""
price_by_date = {str(row.get("date")): row for row in ohlc_rows if row.get("date")}
ordered_dates = [str(row.get("date")) for row in ohlc_rows if row.get("date")]
news_by_date: dict[str, list[dict[str, Any]]] = {}
for row in news_rows:
trade_date = str(row.get("trade_date") or "")[:10] or str(row.get("date") or "")[:10]
if not trade_date:
continue
news_by_date.setdefault(trade_date, []).append(row)
features: list[dict[str, Any]] = []
previous_close: float | None = None
for idx, date in enumerate(ordered_dates):
price_row = price_by_date[date]
close_price = _safe_float(price_row.get("close"))
open_price = _safe_float(price_row.get("open"), close_price)
day_news = news_by_date.get(date, [])
positive_count = sum(1 for item in day_news if str(item.get("sentiment") or "").lower() == "positive")
negative_count = sum(1 for item in day_news if str(item.get("sentiment") or "").lower() == "negative")
high_relevance_count = sum(
1 for item in day_news if str(item.get("relevance") or "").lower() in {"high", "relevant"}
)
ret_1d = (
((close_price - previous_close) / previous_close)
if previous_close not in (None, 0)
else 0.0
)
intraday_ret = ((close_price - open_price) / open_price) if open_price else 0.0
sentiment_score = (
(positive_count - negative_count) / max(len(day_news), 1)
if day_news
else 0.0
)
future_t1 = None
future_t3 = None
if idx + 1 < len(ordered_dates) and close_price:
next_close = _safe_float(price_by_date[ordered_dates[idx + 1]].get("close"))
future_t1 = ((next_close - close_price) / close_price) if next_close else None
if idx + 3 < len(ordered_dates) and close_price:
next_close = _safe_float(price_by_date[ordered_dates[idx + 3]].get("close"))
future_t3 = ((next_close - close_price) / close_price) if next_close else None
features.append(
{
"date": date,
"symbol": symbol,
"n_articles": len(day_news),
"positive_count": positive_count,
"negative_count": negative_count,
"high_relevance_count": high_relevance_count,
"sentiment_score": sentiment_score,
"ret_1d": ret_1d,
"intraday_ret": intraday_ret,
"close": close_price,
"ret_t1_after": future_t1,
"ret_t3_after": future_t3,
"news": [
{
"title": row.get("title") or "",
"sentiment": row.get("sentiment") or "neutral",
}
for row in day_news[:3]
],
}
)
previous_close = close_price
return features
def compute_similarity_scores(
    target_vector: list[float],
    candidate_vectors: list[tuple[str, list[float], dict[str, Any]]],
) -> list[dict[str, Any]]:
    """Rank candidates against *target_vector*, best match first.

    Each dimension is range-normalized across candidates + target (floored at
    1e-9 to avoid division by zero), distance is Euclidean, and similarity is
    1 / (1 + distance), rounded to 4 decimals.
    """
    if not candidate_vectors:
        return []
    width = len(target_vector)
    spans: list[float] = []
    for axis in range(width):
        axis_values = [candidate[1][axis] for candidate in candidate_vectors]
        axis_values.append(target_vector[axis])
        spans.append(max(max(axis_values) - min(axis_values), 1e-9))
    ranked: list[dict[str, Any]] = []
    for day, vector, extra in candidate_vectors:
        squared_distance = 0.0
        for axis in range(width):
            delta = (target_vector[axis] - vector[axis]) / spans[axis]
            squared_distance += delta ** 2
        score = 1.0 / (1.0 + sqrt(squared_distance))
        ranked.append({"date": day, "score": round(score, 4), **extra})
    ranked.sort(key=lambda entry: entry["score"], reverse=True)
    return ranked
def find_similar_days(
    store: MarketStore,
    *,
    symbol: str,
    target_date: str,
    top_k: int = 10,
) -> dict[str, Any]:
    """Find same-ticker historical days most similar to a target day.

    Results are served from the store's similar-day cache when present;
    otherwise features are rebuilt from all OHLC/news up to *target_date*
    and the fresh result is cached.

    Args:
        store: Market warehouse for OHLC, enriched news, and the cache.
        symbol: Ticker to search within (same-ticker only).
        target_date: Day to match against (YYYY-MM-DD).
        top_k: Number of matches to return, clamped to [1, 20].

    Returns:
        Dict with target features and ranked ``items``; carries an ``error``
        key when no feature row exists for the target date.
    """
    cached = store.get_similar_day_cache(symbol, target_date=target_date)
    if cached and cached.get("payload"):
        return cached["payload"]
    # Look-back window: all history up to (and including) the target date.
    ohlc_rows = store.get_ohlc(symbol, "1900-01-01", target_date)
    news_rows = store.get_news_items_enriched(symbol, end_date=target_date, limit=500)
    daily_rows = build_daily_feature_rows(symbol=symbol, ohlc_rows=ohlc_rows, news_rows=news_rows)
    feature_map = {row["date"]: row for row in daily_rows}
    target_row = feature_map.get(target_date)
    if not target_row:
        # Not a trading day (or no data) — nothing to compare against.
        return {
            "symbol": symbol,
            "target_date": target_date,
            "items": [],
            "error": "No feature row for target date",
        }
    # Dimensions of the similarity vector; order must match between target
    # and candidates.
    vector_keys = [
        "sentiment_score",
        "n_articles",
        "positive_count",
        "negative_count",
        "high_relevance_count",
        "ret_1d",
        "intraday_ret",
    ]
    target_vector = [_safe_float(target_row.get(key)) for key in vector_keys]
    candidates = []
    for row in daily_rows:
        date = row["date"]
        if date == target_date:
            continue
        # Display payload attached to each match; returns are expressed in %.
        payload = {
            "n_articles": row["n_articles"],
            "sentiment_score": round(row["sentiment_score"], 4),
            "ret_1d": round(row["ret_1d"] * 100, 2),
            "intraday_ret": round(row["intraday_ret"] * 100, 2),
            "ret_t1_after": round(row["ret_t1_after"] * 100, 2) if row["ret_t1_after"] is not None else None,
            "ret_t3_after": round(row["ret_t3_after"] * 100, 2) if row["ret_t3_after"] is not None else None,
            "top_reasons": [item["title"] for item in row["news"][:2] if item.get("title")],
            "news": row["news"],
        }
        candidates.append(
            (
                date,
                [_safe_float(row.get(key)) for key in vector_keys],
                payload,
            )
        )
    # Clamp top_k to [1, 20] before slicing the ranked matches.
    items = compute_similarity_scores(target_vector, candidates)[: max(1, min(int(top_k), 20))]
    result = {
        "symbol": symbol,
        "target_date": target_date,
        "target_features": {
            "sentiment_score": round(target_row["sentiment_score"], 4),
            "n_articles": target_row["n_articles"],
            "ret_1d": round(target_row["ret_1d"] * 100, 2),
            "intraday_ret": round(target_row["intraday_ret"] * 100, 2),
            "high_relevance_count": target_row["high_relevance_count"],
        },
        "items": items,
    }
    store.upsert_similar_day_cache(symbol, target_date=target_date, payload=result, source="local")
    return result

View File

@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
"""Stock story generation for explain view."""
from __future__ import annotations
from datetime import datetime, timedelta
from typing import Any
from backend.data.market_store import MarketStore
def build_stock_story(
    *,
    symbol: str,
    as_of_date: str,
    price_rows: list[dict[str, Any]],
    news_rows: list[dict[str, Any]],
) -> str:
    """Render a compact markdown story from price action and enriched news.

    Sections: header, price move summary, coverage counts, then (when news
    exists) Key Moments and bullish/bearish threads.
    """
    story: list[str] = [f"## {symbol} Story", f"As of `{as_of_date}`"]
    if not price_rows:
        story.extend(["", "No OHLC data available for story generation."])
        return "\n".join(story)

    first_bar = price_rows[0]
    last_bar = price_rows[-1]
    window_open = float(first_bar.get("open") or first_bar.get("close") or 0.0)
    window_close = float(last_bar.get("close") or 0.0)
    move_pct = ((window_close - window_open) / window_open) * 100 if window_open else 0.0
    window_high = max(float(bar.get("high") or bar.get("close") or 0.0) for bar in price_rows)
    window_low = min(float(bar.get("low") or bar.get("close") or 0.0) for bar in price_rows)
    story.extend(
        [
            "",
            f"The stock moved {'up' if move_pct >= 0 else 'down'} "
            f"{abs(move_pct):.2f}% over the recent window, trading between "
            f"${window_low:.2f} and ${window_high:.2f}.",
        ]
    )

    bullish = [r for r in news_rows if str(r.get("sentiment") or "").lower() == "positive"]
    bearish = [r for r in news_rows if str(r.get("sentiment") or "").lower() == "negative"]
    story.extend(
        [
            "",
            f"Recent coverage included {len(news_rows)} relevant articles "
            f"({len(bullish)} positive / {len(bearish)} negative).",
        ]
    )

    if news_rows:
        story.extend(["", "### Key Moments"])

        def _moment_rank(article: dict[str, Any]) -> tuple[int, float]:
            # High-relevance first, then by largest same-day move.
            is_high = str(article.get("relevance") or "").lower() in {"high", "relevant"}
            return (0 if is_high else 1, -abs(float(article.get("ret_t0") or 0.0)))

        for article in sorted(news_rows, key=_moment_rank)[:5]:
            day = article.get("trade_date") or str(article.get("date") or "")[:10]
            headline = article.get("title") or "Untitled"
            detail = article.get("key_discussion") or article.get("summary") or ""
            tone = str(article.get("sentiment") or "neutral").lower()
            story.append(f"- `{day}` [{tone}] {headline}: {str(detail).strip()[:220]}")

    if bullish:
        story.extend(["", "### Bullish Threads"])
        for article in bullish[:3]:
            reason = (
                article.get("reason_growth")
                or article.get("key_discussion")
                or article.get("summary")
                or article.get("title")
            )
            story.append(f"- {str(reason).strip()[:220]}")

    if bearish:
        story.extend(["", "### Bearish Threads"])
        for article in bearish[:3]:
            reason = (
                article.get("reason_decrease")
                or article.get("key_discussion")
                or article.get("summary")
                or article.get("title")
            )
            story.append(f"- {str(reason).strip()[:220]}")

    return "\n".join(story)
def get_or_create_stock_story(
    store: MarketStore,
    *,
    symbol: str,
    as_of_date: str,
) -> dict[str, Any]:
    """Return cached story or build a new one from recent market context.

    On a cache miss the story is built from a trailing ~30-day window
    (29 days back through *as_of_date*) of OHLC and up to 40 enriched news
    rows, then persisted to the story cache with source "local".

    Args:
        store: Market warehouse for OHLC, enriched news, and the story cache.
        symbol: Ticker to narrate.
        as_of_date: Anchor date; must start with YYYY-MM-DD for the price
            window to be derived (otherwise no price rows are loaded).

    Returns:
        Dict with ``symbol``, ``as_of_date``, the markdown ``story``, and a
        ``source`` of "cache"/cached-source or "local" for fresh builds.
    """
    cached = store.get_story_cache(symbol, as_of_date=as_of_date)
    if cached:
        return {
            "symbol": symbol,
            "as_of_date": as_of_date,
            "story": cached.get("content") or "",
            "source": cached.get("source") or "cache",
        }
    start_date = None
    if len(as_of_date) >= 10:
        # 29 days back + the anchor day itself = a 30-day window.
        target_date = datetime.strptime(as_of_date[:10], "%Y-%m-%d").date()
        start_date = (target_date - timedelta(days=29)).isoformat()
    price_rows = (
        store.get_ohlc(symbol, start_date, as_of_date)
        if start_date
        else []
    )
    news_rows = store.get_news_items_enriched(
        symbol,
        start_date=start_date,
        end_date=as_of_date,
        limit=40,
    )
    story = build_stock_story(
        symbol=symbol,
        as_of_date=as_of_date,
        price_rows=price_rows,
        news_rows=news_rows,
    )
    store.upsert_story_cache(symbol, as_of_date=as_of_date, content=story, source="local")
    return {
        "symbol": symbol,
        "as_of_date": as_of_date,
        "story": story,
        "source": "local",
    }

View File

@@ -17,6 +17,12 @@ from backend.config.bootstrap_config import (
update_bootstrap_values_for_run,
)
from backend.data.provider_utils import normalize_symbol
from backend.data.market_ingest import ingest_symbols
from backend.enrich.llm_enricher import llm_enrichment_enabled
from backend.enrich.news_enricher import enrich_news_for_symbol
from backend.explain.range_explainer import build_range_explanation
from backend.explain.similarity_service import find_similar_days
from backend.explain.story_service import get_or_create_stock_story
from backend.utils.msg_adapter import FrontendAdapter
from backend.utils.terminal_dashboard import get_dashboard
from backend.core.pipeline import TradingPipeline
@@ -25,6 +31,7 @@ from backend.services.market import MarketService
from backend.services.storage import StorageService
from backend.data.provider_router import get_provider_router
from backend.tools.data_tools import get_prices
from backend.tools.data_tools import get_company_news
logger = logging.getLogger(__name__)
@@ -65,6 +72,7 @@ class Gateway:
self._backtest_end_date: Optional[str] = None
self._dashboard = get_dashboard()
self._market_status_task: Optional[asyncio.Task] = None
self._watchlist_ingest_task: Optional[asyncio.Task] = None
# Session tracking for live returns
self._session_start_portfolio_value: Optional[float] = None
@@ -182,6 +190,17 @@ class Gateway:
def state(self) -> Dict[str, Any]:
return self.state_sync.state
@staticmethod
def _news_rows_need_enrichment(rows: List[Dict[str, Any]]) -> bool:
    """Return True when *rows* is empty or no row carries enrichment fields.

    A single row with any of sentiment / relevance / key_discussion set is
    treated as "already enriched" for the whole batch.
    """
    if not rows:
        return True
    return not any(
        row.get("sentiment") or row.get("relevance") or row.get("key_discussion")
        for row in rows
    )
async def handle_client(self, websocket: ServerConnection):
"""Handle WebSocket client connection"""
async with self.lock:
@@ -250,6 +269,22 @@ class Gateway:
await self._handle_get_stock_history(websocket, data)
elif msg_type == "get_stock_explain_events":
await self._handle_get_stock_explain_events(websocket, data)
elif msg_type == "get_stock_news":
await self._handle_get_stock_news(websocket, data)
elif msg_type == "get_stock_news_for_date":
await self._handle_get_stock_news_for_date(websocket, data)
elif msg_type == "get_stock_news_timeline":
await self._handle_get_stock_news_timeline(websocket, data)
elif msg_type == "get_stock_news_categories":
await self._handle_get_stock_news_categories(websocket, data)
elif msg_type == "get_stock_range_explain":
await self._handle_get_stock_range_explain(websocket, data)
elif msg_type == "get_stock_story":
await self._handle_get_stock_story(websocket, data)
elif msg_type == "get_stock_similar_days":
await self._handle_get_stock_similar_days(websocket, data)
elif msg_type == "run_stock_enrich":
await self._handle_run_stock_enrich(websocket, data)
except websockets.ConnectionClosed:
pass
@@ -297,6 +332,14 @@ class Gateway:
"%Y-%m-%d",
)
prices = await asyncio.to_thread(
self.storage.market_store.get_ohlc,
ticker,
start_date,
end_date,
)
source = "polygon"
if not prices:
prices = await asyncio.to_thread(
get_prices,
ticker,
@@ -305,13 +348,23 @@ class Gateway:
)
usage_snapshot = self._provider_router.get_usage_snapshot()
source = usage_snapshot.get("last_success", {}).get("prices")
if prices:
await asyncio.to_thread(
self.storage.market_store.upsert_ohlc,
ticker,
[price.model_dump() for price in prices],
source=source or "provider",
)
await websocket.send(
json.dumps(
{
"type": "stock_history_loaded",
"ticker": ticker,
"prices": [price.model_dump() for price in prices][-120:],
"prices": [
price if isinstance(price, dict) else price.model_dump()
for price in prices
][-120:],
"source": source,
"start_date": start_date,
"end_date": end_date,
@@ -342,6 +395,636 @@ class Gateway:
),
)
async def _handle_get_stock_news(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Serve recent enriched news for a ticker, enriching on demand.

    When the local store has no analyzed rows, pulls the provider feed,
    persists it, runs enrichment, and re-reads before replying with a
    ``stock_news_loaded`` message.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_news_loaded",
                    "ticker": "",
                    "news": [],
                    "source": None,
                    "error": "invalid ticker",
                },
                ensure_ascii=False,
            ),
        )
        return

    def _clamp(raw: Any, lo: int, hi: int, fallback: int) -> int:
        # Coerce client-supplied values into a safe integer range.
        try:
            return max(lo, min(int(raw), hi))
        except (TypeError, ValueError):
            return fallback

    lookback_days = _clamp(data.get("lookback_days", 30), 7, 180, 30)
    limit = _clamp(data.get("limit", 12), 1, 30, 12)

    end_date = self.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError:
        end_dt = datetime.now()
    end_date = end_dt.strftime("%Y-%m-%d")
    start_date = (end_dt - timedelta(days=lookback_days)).strftime("%Y-%m-%d")

    news_rows = await asyncio.to_thread(
        self.storage.market_store.get_news_items_enriched,
        ticker,
        start_date=start_date,
        end_date=end_date,
        limit=limit,
    )
    source = "polygon"
    if self._news_rows_need_enrichment(news_rows):
        # Nothing analyzed locally: fetch from the provider, store, enrich.
        fresh = await asyncio.to_thread(
            get_company_news,
            ticker,
            end_date,
            start_date,
            limit,
        )
        if fresh:
            usage = self._provider_router.get_usage_snapshot()
            source = usage.get("last_success", {}).get("company_news")
            await asyncio.to_thread(
                self.storage.market_store.upsert_news,
                ticker,
                [item.model_dump() for item in fresh],
                source=source or "provider",
            )
        await asyncio.to_thread(
            enrich_news_for_symbol,
            self.storage.market_store,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=max(limit, 50),
        )
        news_rows = await asyncio.to_thread(
            self.storage.market_store.get_news_items_enriched,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=limit,
        )
        source = source or "market_store"
    await websocket.send(
        json.dumps(
            {
                "type": "stock_news_loaded",
                "ticker": ticker,
                "news": news_rows[-limit:],
                "source": source,
                "start_date": start_date,
                "end_date": end_date,
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_news_for_date(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Reply with enriched news for one trade date (``stock_news_for_date_loaded``)."""
    ticker = normalize_symbol(data.get("ticker", ""))
    trade_date = str(data.get("date") or "").strip()
    if not ticker or not trade_date:
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_news_for_date_loaded",
                    "ticker": ticker,
                    "date": trade_date,
                    "news": [],
                    "error": "ticker and date are required",
                },
                ensure_ascii=False,
            ),
        )
        return
    try:
        max_items = max(1, min(int(data.get("limit", 20)), 50))
    except (TypeError, ValueError):
        max_items = 20
    rows = await asyncio.to_thread(
        self.storage.market_store.get_news_items_enriched,
        ticker,
        trade_date=trade_date,
        limit=max_items,
    )
    if self._news_rows_need_enrichment(rows):
        # Nothing analyzed for this day yet: enrich in place, then re-read.
        await asyncio.to_thread(
            enrich_news_for_symbol,
            self.storage.market_store,
            ticker,
            start_date=trade_date,
            end_date=trade_date,
            limit=max_items,
        )
        rows = await asyncio.to_thread(
            self.storage.market_store.get_news_items_enriched,
            ticker,
            trade_date=trade_date,
            limit=max_items,
        )
    await websocket.send(
        json.dumps(
            {
                "type": "stock_news_for_date_loaded",
                "ticker": ticker,
                "date": trade_date,
                "news": rows,
                "source": "market_store",
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_news_timeline(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Send per-day news counts for chart markers (``stock_news_timeline_loaded``)."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_news_timeline_loaded",
                    "ticker": "",
                    "timeline": [],
                    "error": "invalid ticker",
                },
                ensure_ascii=False,
            ),
        )
        return
    try:
        window_days = max(7, min(int(data.get("lookback_days", 90)), 365))
    except (TypeError, ValueError):
        window_days = 90
    anchor = self.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        anchor_dt = datetime.strptime(anchor, "%Y-%m-%d")
    except ValueError:
        anchor_dt = datetime.now()
    end_date = anchor_dt.strftime("%Y-%m-%d")
    start_date = (anchor_dt - timedelta(days=window_days)).strftime("%Y-%m-%d")
    timeline = await asyncio.to_thread(
        self.storage.market_store.get_news_timeline_enriched,
        ticker,
        start_date=start_date,
        end_date=end_date,
    )
    if not timeline:
        # Cold store: run enrichment once, then retry the aggregate query.
        await asyncio.to_thread(
            enrich_news_for_symbol,
            self.storage.market_store,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=200,
        )
        timeline = await asyncio.to_thread(
            self.storage.market_store.get_news_timeline_enriched,
            ticker,
            start_date=start_date,
            end_date=end_date,
        )
    await websocket.send(
        json.dumps(
            {
                "type": "stock_news_timeline_loaded",
                "ticker": ticker,
                "timeline": timeline,
                "start_date": start_date,
                "end_date": end_date,
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_news_categories(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Send the per-category breakdown of recent enriched news."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_news_categories_loaded",
                    "ticker": "",
                    "categories": {},
                    "error": "invalid ticker",
                },
                ensure_ascii=False,
            ),
        )
        return
    try:
        window_days = max(7, min(int(data.get("lookback_days", 90)), 365))
    except (TypeError, ValueError):
        window_days = 90
    anchor = self.state_sync.state.get("current_date") or datetime.now().strftime("%Y-%m-%d")
    try:
        anchor_dt = datetime.strptime(anchor, "%Y-%m-%d")
    except ValueError:
        anchor_dt = datetime.now()
    end_date = anchor_dt.strftime("%Y-%m-%d")
    start_date = (anchor_dt - timedelta(days=window_days)).strftime("%Y-%m-%d")
    # Sample a window of rows only to decide whether enrichment is needed.
    sample_rows = await asyncio.to_thread(
        self.storage.market_store.get_news_items_enriched,
        ticker,
        start_date=start_date,
        end_date=end_date,
        limit=200,
    )
    if self._news_rows_need_enrichment(sample_rows):
        await asyncio.to_thread(
            enrich_news_for_symbol,
            self.storage.market_store,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=200,
        )
    categories = await asyncio.to_thread(
        self.storage.market_store.get_news_categories_enriched,
        ticker,
        start_date=start_date,
        end_date=end_date,
        limit=200,
    )
    await websocket.send(
        json.dumps(
            {
                "type": "stock_news_categories_loaded",
                "ticker": ticker,
                "categories": categories,
                "start_date": start_date,
                "end_date": end_date,
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_range_explain(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Explain a price range from enriched news and send the result.

    Uses caller-pinned ``article_ids`` when provided; otherwise falls back
    to all enriched news in [start_date, end_date].
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    start_date = str(data.get("start_date") or "").strip()
    end_date = str(data.get("end_date") or "").strip()
    if not (ticker and start_date and end_date):
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_range_explain_loaded",
                    "ticker": ticker,
                    "result": {"error": "ticker, start_date, end_date are required"},
                },
                ensure_ascii=False,
            ),
        )
        return

    async def _enrich_window() -> None:
        # Shared one-shot enrichment pass over the requested window.
        await asyncio.to_thread(
            enrich_news_for_symbol,
            self.storage.market_store,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=100,
        )

    article_ids = data.get("article_ids")
    if isinstance(article_ids, list) and article_ids:
        news_rows = await asyncio.to_thread(
            self.storage.market_store.get_news_by_ids_enriched,
            ticker,
            article_ids,
        )
        if self._news_rows_need_enrichment(news_rows):
            await _enrich_window()
            news_rows = await asyncio.to_thread(
                self.storage.market_store.get_news_by_ids_enriched,
                ticker,
                article_ids,
            )
    else:
        news_rows = await asyncio.to_thread(
            self.storage.market_store.get_news_items_enriched,
            ticker,
            start_date=start_date,
            end_date=end_date,
            limit=100,
        )
        if not news_rows:
            await _enrich_window()
            news_rows = await asyncio.to_thread(
                self.storage.market_store.get_news_items_enriched,
                ticker,
                start_date=start_date,
                end_date=end_date,
                limit=100,
            )
    result = await asyncio.to_thread(
        build_range_explanation,
        ticker=ticker,
        start_date=start_date,
        end_date=end_date,
        news_rows=news_rows,
    )
    await websocket.send(
        json.dumps(
            {
                "type": "stock_range_explain_loaded",
                "ticker": ticker,
                "result": result,
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_story(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Build (or reuse) the cached narrative for a symbol and send it."""
    ticker = normalize_symbol(data.get("ticker", ""))
    if not ticker:
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_story_loaded",
                    "ticker": "",
                    "story": "",
                    "error": "invalid ticker",
                },
                ensure_ascii=False,
            ),
        )
        return
    raw_as_of = (
        data.get("as_of_date")
        or self.state_sync.state.get("current_date")
        or datetime.now().strftime("%Y-%m-%d")
    )
    as_of_date = str(raw_as_of).strip()[:10]
    # Make sure recent news is analyzed before composing the story.
    await asyncio.to_thread(
        enrich_news_for_symbol,
        self.storage.market_store,
        ticker,
        end_date=as_of_date,
        limit=80,
    )
    result = await asyncio.to_thread(
        get_or_create_stock_story,
        self.storage.market_store,
        symbol=ticker,
        as_of_date=as_of_date,
    )
    await websocket.send(
        json.dumps(
            {
                "type": "stock_story_loaded",
                "ticker": ticker,
                "as_of_date": as_of_date,
                "story": result.get("story") or "",
                "source": result.get("source") or "local",
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_get_stock_similar_days(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Find historically similar trading days and send them to the client."""
    ticker = normalize_symbol(data.get("ticker", ""))
    target_date = str(data.get("date") or "").strip()[:10]
    if not (ticker and target_date):
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_similar_days_loaded",
                    "ticker": ticker,
                    "date": target_date,
                    "items": [],
                    "error": "ticker and date are required",
                },
                ensure_ascii=False,
            ),
        )
        return
    try:
        neighbor_count = max(1, min(int(data.get("top_k", 8)), 20))
    except (TypeError, ValueError):
        neighbor_count = 8
    # Similarity relies on enriched features; refresh them first.
    await asyncio.to_thread(
        enrich_news_for_symbol,
        self.storage.market_store,
        ticker,
        end_date=target_date,
        limit=200,
    )
    result = await asyncio.to_thread(
        find_similar_days,
        self.storage.market_store,
        symbol=ticker,
        target_date=target_date,
        top_k=neighbor_count,
    )
    payload = {
        "type": "stock_similar_days_loaded",
        "ticker": ticker,
        "date": target_date,
    }
    payload.update(result)
    await websocket.send(json.dumps(payload, ensure_ascii=False, default=str))
async def _handle_run_stock_enrich(
    self,
    websocket: ServerConnection,
    data: Dict[str, Any],
):
    """Run on-demand enrichment for a symbol, optionally rebuilding caches.

    Always replies with a single ``stock_enrich_completed`` message that
    carries either an ``error`` or the enrichment stats plus the status of
    any story / similar-day cache rebuilds.
    """
    ticker = normalize_symbol(data.get("ticker", ""))
    start_date = str(data.get("start_date") or "").strip()[:10]
    end_date = str(data.get("end_date") or "").strip()[:10]
    story_date = str(data.get("story_date") or end_date or "").strip()[:10]
    target_date = str(data.get("target_date") or "").strip()[:10]
    force = bool(data.get("force", False))
    rebuild_story = bool(data.get("rebuild_story", True))
    rebuild_similar_days = bool(data.get("rebuild_similar_days", True))
    only_local_to_llm = bool(data.get("only_local_to_llm", False))
    try:
        batch_limit = max(10, min(int(data.get("limit", 200)), 500))
    except (TypeError, ValueError):
        batch_limit = 200

    async def _fail(message: str) -> None:
        # Single error-reply shape shared by the validation paths below.
        await websocket.send(
            json.dumps(
                {
                    "type": "stock_enrich_completed",
                    "ticker": ticker,
                    "start_date": start_date,
                    "end_date": end_date,
                    "error": message,
                },
                ensure_ascii=False,
            ),
        )

    if not (ticker and start_date and end_date):
        await _fail("ticker, start_date, end_date are required")
        return
    if only_local_to_llm and not llm_enrichment_enabled():
        await _fail(
            "only_local_to_llm requires EXPLAIN_ENRICH_USE_LLM=true and a configured LLM provider"
        )
        return
    stats = await asyncio.to_thread(
        enrich_news_for_symbol,
        self.storage.market_store,
        ticker,
        start_date=start_date,
        end_date=end_date,
        limit=batch_limit,
        skip_existing=not force,
        only_reanalyze_local=only_local_to_llm,
    )
    story_status = None
    if rebuild_story and story_date:
        # Drop the cached story so the rebuild reflects fresh enrichment.
        await asyncio.to_thread(
            self.storage.market_store.delete_story_cache,
            ticker,
            as_of_date=story_date,
        )
        story_result = await asyncio.to_thread(
            get_or_create_stock_story,
            self.storage.market_store,
            symbol=ticker,
            as_of_date=story_date,
        )
        story_status = {
            "as_of_date": story_date,
            "source": story_result.get("source") or "local",
        }
    similar_status = None
    if rebuild_similar_days and target_date:
        await asyncio.to_thread(
            self.storage.market_store.delete_similar_day_cache,
            ticker,
            target_date=target_date,
        )
        similar_result = await asyncio.to_thread(
            find_similar_days,
            self.storage.market_store,
            symbol=ticker,
            target_date=target_date,
            top_k=8,
        )
        similar_status = {
            "target_date": target_date,
            "count": len(similar_result.get("items") or []),
            "error": similar_result.get("error"),
        }
    await websocket.send(
        json.dumps(
            {
                "type": "stock_enrich_completed",
                "ticker": ticker,
                "start_date": start_date,
                "end_date": end_date,
                "story_date": story_date or None,
                "target_date": target_date or None,
                "force": force,
                "only_local_to_llm": only_local_to_llm,
                "stats": stats,
                "story_status": story_status,
                "similar_status": similar_status,
            },
            ensure_ascii=False,
            default=str,
        ),
    )
async def _handle_start_backtest(self, data: Dict[str, Any]):
if not self.is_backtest:
return
@@ -410,6 +1093,7 @@ class Gateway:
},
)
await self._handle_reload_runtime_assets()
self._schedule_watchlist_market_store_refresh(tickers)
@staticmethod
def _normalize_watchlist(raw_tickers: Any) -> List[str]:
@@ -538,6 +1222,48 @@ class Gateway:
trades=trades,
)
def _schedule_watchlist_market_store_refresh(
    self,
    tickers: List[str],
) -> None:
    """Kick off a non-blocking Polygon refresh for the updated watchlist."""
    if not tickers:
        return
    previous = self._watchlist_ingest_task
    if previous and not previous.done():
        # At most one refresh runs at a time; the newest watchlist wins.
        previous.cancel()
    self._watchlist_ingest_task = asyncio.create_task(
        self._refresh_market_store_for_watchlist(tickers),
    )
async def _refresh_market_store_for_watchlist(
    self,
    tickers: List[str],
) -> None:
    """Refresh the long-lived market store after a watchlist update.

    Emits progress via system messages; cancellation (a newer watchlist
    superseding this run) is re-raised, all other failures are logged and
    reported to the UI without crashing the gateway.
    """
    notify = self.state_sync.on_system_message
    try:
        await notify(f"正在同步自选股市场数据: {', '.join(tickers)}")
        ingest_results = await asyncio.to_thread(
            ingest_symbols,
            tickers,
            mode="incremental",
        )
        parts = [
            f"{entry['symbol']} prices={entry['prices']} news={entry['news']}"
            for entry in ingest_results
        ]
        await notify(f"自选股市场数据已同步: {', '.join(parts)}")
    except asyncio.CancelledError:
        # Superseded by a newer watchlist update; propagate cancellation.
        raise
    except Exception as exc:
        logger.warning("Watchlist market store refresh failed: %s", exc)
        await notify(f"自选股市场数据同步失败: {exc}")
async def broadcast(self, message: Dict[str, Any]):
"""Broadcast message to all connected clients"""
if not self.connected_clients:
@@ -896,4 +1622,6 @@ class Gateway:
self._backtest_task.cancel()
if self._market_status_task:
self._market_status_task.cancel()
if self._watchlist_ingest_task:
self._watchlist_ingest_task.cancel()
self._dashboard.stop()

View File

@@ -65,6 +65,18 @@ class MarketService:
self._session_start_values: Optional[Dict[str, float]] = None
self._session_start_timestamp: Optional[int] = None
def get_live_quote_provider(self) -> Optional[str]:
    """Return the active live quote provider for UI/debugging.

    Backtest and mock modes report fixed labels; otherwise the price
    manager's ``provider`` attribute is used when it is a non-blank string.
    """
    if self.backtest_mode:
        return "backtest"
    if self.mock_mode:
        return "mock"
    manager = self._price_manager
    if not manager or not hasattr(manager, "provider"):
        return None
    provider = getattr(manager, "provider", None)
    if not isinstance(provider, str):
        return None
    cleaned = provider.strip()
    return cleaned.lower() if cleaned else None
@property
def mode_name(self) -> str:
if self.backtest_mode:
@@ -532,6 +544,7 @@ class MarketService:
"status": MarketStatus.OPEN,
"status_text": "Backtest Mode",
"is_trading_day": True,
"live_quote_provider": self.get_live_quote_provider(),
}
now = self._now_nyse()
@@ -544,6 +557,7 @@ class MarketService:
"status": MarketStatus.CLOSED,
"status_text": "Market Closed (Non-trading Day)",
"is_trading_day": False,
"live_quote_provider": self.get_live_quote_provider(),
}
market_open, market_close = self._get_market_hours(today)
@@ -553,6 +567,7 @@ class MarketService:
"status": MarketStatus.CLOSED,
"status_text": "Market Closed",
"is_trading_day": is_trading,
"live_quote_provider": self.get_live_quote_provider(),
}
# Determine status based on current time
@@ -563,6 +578,7 @@ class MarketService:
"is_trading_day": True,
"market_open": market_open.isoformat(),
"market_close": market_close.isoformat(),
"live_quote_provider": self.get_live_quote_provider(),
}
elif now > market_close:
return {
@@ -571,6 +587,7 @@ class MarketService:
"is_trading_day": True,
"market_open": market_open.isoformat(),
"market_close": market_close.isoformat(),
"live_quote_provider": self.get_live_quote_provider(),
}
else:
return {
@@ -579,6 +596,7 @@ class MarketService:
"is_trading_day": True,
"market_open": market_open.isoformat(),
"market_close": market_close.isoformat(),
"live_quote_provider": self.get_live_quote_provider(),
}
async def check_and_broadcast_market_status(self):

View File

@@ -0,0 +1,280 @@
# -*- coding: utf-8 -*-
"""Query-oriented storage for explain/research data."""
from __future__ import annotations
import json
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Iterable
from backend.data.schema import CompanyNews
SCHEMA = """
CREATE TABLE IF NOT EXISTS news_items (
id TEXT PRIMARY KEY,
ticker TEXT NOT NULL,
published_at TEXT,
trade_date TEXT,
source TEXT,
title TEXT NOT NULL,
summary TEXT,
url TEXT,
related TEXT,
category TEXT,
raw_json TEXT NOT NULL,
ingest_run_date TEXT,
created_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_news_items_ticker_date
ON news_items (ticker, trade_date DESC, published_at DESC);
"""
def _json_dumps(value: Any) -> str:
return json.dumps(value, ensure_ascii=False, sort_keys=True, default=str)
def _resolve_news_id(ticker: str, item: CompanyNews, fallback_index: int) -> str:
base = item.url or item.title or f"{ticker}-{fallback_index}"
return f"{ticker}:{base}"
def _resolve_trade_date(date_value: str | None) -> str | None:
if not date_value:
return None
normalized = str(date_value).strip()
if not normalized:
return None
if "T" in normalized:
return normalized.split("T", 1)[0]
if " " in normalized:
return normalized.split(" ", 1)[0]
return normalized[:10]
class ResearchDb:
    """Small SQLite helper for explain-oriented news storage.

    Owns a single ``news_items`` table (see ``SCHEMA``) and exposes the
    narrow query surface the explain UI needs: upsert, range listing,
    per-day aggregation, and id lookup.
    """

    def __init__(self, db_path: Path):
        self.db_path = Path(db_path)
        # SQLite will not create missing parent directories on its own.
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._init_db()

    def _connect(self) -> sqlite3.Connection:
        """Open a connection with mapping-style rows, WAL mode and FK checks."""
        connection = sqlite3.connect(self.db_path)
        connection.row_factory = sqlite3.Row
        connection.execute("PRAGMA journal_mode=WAL")
        connection.execute("PRAGMA foreign_keys=ON")
        return connection

    def _init_db(self):
        """Apply the idempotent DDL in ``SCHEMA``."""
        with self._connect() as conn:
            conn.executescript(SCHEMA)

    @staticmethod
    def _row_to_public_dict(row: sqlite3.Row) -> dict[str, Any]:
        """Map a stored row onto the dict shape consumed by the explain UI."""
        return {
            "id": row["id"],
            "ticker": row["ticker"],
            "date": row["published_at"] or row["trade_date"],
            "trade_date": row["trade_date"],
            "source": row["source"],
            "title": row["title"],
            "summary": row["summary"],
            "url": row["url"],
            "related": row["related"],
            "category": row["category"],
        }

    def upsert_news_items(
        self,
        *,
        ticker: str,
        items: Iterable[CompanyNews],
        ingest_run_date: str | None = None,
    ) -> list[dict[str, Any]]:
        """Persist provider news and return normalized rows."""
        stored: list[dict[str, Any]] = []
        symbol = str(ticker or "").strip().upper()
        if not symbol:
            return stored
        created_at = datetime.utcnow().isoformat(timespec="seconds")
        upsert_sql = """
            INSERT INTO news_items
            (id, ticker, published_at, trade_date, source, title, summary, url,
             related, category, raw_json, ingest_run_date, created_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(id) DO UPDATE SET
                ticker = excluded.ticker,
                published_at = excluded.published_at,
                trade_date = excluded.trade_date,
                source = excluded.source,
                title = excluded.title,
                summary = excluded.summary,
                url = excluded.url,
                related = excluded.related,
                category = excluded.category,
                raw_json = excluded.raw_json,
                ingest_run_date = excluded.ingest_run_date
        """
        with self._connect() as conn:
            for index, item in enumerate(items):
                # Insertion order of this dict matches the column order in
                # upsert_sql, so its values() tuple can be bound directly.
                record = {
                    "id": _resolve_news_id(symbol, item, index),
                    "ticker": symbol,
                    "published_at": item.date,
                    "trade_date": _resolve_trade_date(item.date),
                    "source": item.source,
                    "title": item.title,
                    "summary": item.summary,
                    "url": item.url,
                    "related": item.related,
                    "category": item.category,
                    "raw_json": _json_dumps(item.model_dump()),
                    "ingest_run_date": ingest_run_date,
                    "created_at": created_at,
                }
                conn.execute(upsert_sql, tuple(record.values()))
                stored.append(record)
        return stored

    def get_news_items(
        self,
        *,
        ticker: str,
        start_date: str | None = None,
        end_date: str | None = None,
        limit: int = 20,
    ) -> list[dict[str, Any]]:
        """Return normalized news rows for the explain UI, newest first."""
        symbol = str(ticker or "").strip().upper()
        if not symbol:
            return []
        query = [
            """
            SELECT id, ticker, published_at, trade_date, source, title, summary,
                   url, related, category
            FROM news_items
            WHERE ticker = ?
            """
        ]
        params: list[Any] = [symbol]
        # Fall back to the date part of published_at when trade_date is NULL.
        if start_date:
            query.append(" AND COALESCE(trade_date, substr(published_at, 1, 10)) >= ?")
            params.append(start_date)
        if end_date:
            query.append(" AND COALESCE(trade_date, substr(published_at, 1, 10)) <= ?")
            params.append(end_date)
        query.append(" ORDER BY COALESCE(published_at, trade_date) DESC LIMIT ?")
        params.append(max(1, int(limit)))
        with self._connect() as conn:
            fetched = conn.execute("".join(query), params).fetchall()
        return [self._row_to_public_dict(row) for row in fetched]

    def get_news_timeline(
        self,
        *,
        ticker: str,
        start_date: str | None = None,
        end_date: str | None = None,
    ) -> list[dict[str, Any]]:
        """Aggregate news counts per trade date for chart markers."""
        symbol = str(ticker or "").strip().upper()
        if not symbol:
            return []
        query = [
            """
            SELECT COALESCE(trade_date, substr(published_at, 1, 10)) AS date,
                   COUNT(*) AS count,
                   COUNT(DISTINCT source) AS source_count,
                   MAX(title) AS top_title
            FROM news_items
            WHERE ticker = ?
            """
        ]
        params: list[Any] = [symbol]
        if start_date:
            query.append(" AND COALESCE(trade_date, substr(published_at, 1, 10)) >= ?")
            params.append(start_date)
        if end_date:
            query.append(" AND COALESCE(trade_date, substr(published_at, 1, 10)) <= ?")
            params.append(end_date)
        query.append(
            """
            GROUP BY COALESCE(trade_date, substr(published_at, 1, 10))
            ORDER BY date ASC
            """
        )
        with self._connect() as conn:
            fetched = conn.execute("".join(query), params).fetchall()
        # Rows whose date resolves to NULL are dropped from the timeline.
        return [
            {
                "date": row["date"],
                "count": int(row["count"] or 0),
                "source_count": int(row["source_count"] or 0),
                "top_title": row["top_title"] or "",
            }
            for row in fetched
            if row["date"]
        ]

    def get_news_by_ids(
        self,
        *,
        ticker: str,
        article_ids: Iterable[str],
    ) -> list[dict[str, Any]]:
        """Return the persisted rows matching *article_ids*, newest first."""
        symbol = str(ticker or "").strip().upper()
        wanted = [
            str(article_id).strip()
            for article_id in article_ids
            if str(article_id).strip()
        ]
        if not symbol or not wanted:
            return []
        placeholders = ",".join("?" for _ in wanted)
        sql = f"""
            SELECT id, ticker, published_at, trade_date, source, title, summary,
                   url, related, category
            FROM news_items
            WHERE ticker = ? AND id IN ({placeholders})
            ORDER BY COALESCE(published_at, trade_date) DESC
        """
        with self._connect() as conn:
            fetched = conn.execute(sql, [symbol, *wanted]).fetchall()
        return [self._row_to_public_dict(row) for row in fetched]

View File

@@ -10,6 +10,8 @@ from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional
from backend.data.market_store import MarketStore
from .research_db import ResearchDb
from .runtime_db import RuntimeDb
logger = logging.getLogger(__name__)
@@ -64,6 +66,8 @@ class StorageService:
self.state_dir.mkdir(parents=True, exist_ok=True)
self.server_state_file = self.state_dir / "server_state.json"
self.runtime_db = RuntimeDb(self.state_dir / "runtime.db")
self.research_db = ResearchDb(self.state_dir / "research.db")
self.market_store = MarketStore()
# Feed history (for agent messages)
self.max_feed_history = 200

236
backend/tests/test_cli.py Normal file
View File

@@ -0,0 +1,236 @@
# -*- coding: utf-8 -*-
from pathlib import Path
from backend import cli
def test_live_runs_incremental_market_store_update_before_start(monkeypatch, tmp_path):
    """`evotraders live` must refresh data stores before launching backend.main."""
    project_root = tmp_path
    (project_root / ".env").write_text("FINNHUB_API_KEY=test\n", encoding="utf-8")
    recorded = []

    monkeypatch.setattr(cli, "get_project_root", lambda: project_root)
    monkeypatch.setattr(cli, "handle_history_cleanup", lambda config_name, auto_clean=False: None)
    monkeypatch.setattr(
        cli,
        "run_data_updater",
        lambda project_root: recorded.append(("run_data_updater", project_root)),
    )
    monkeypatch.setattr(
        cli,
        "auto_update_market_store",
        lambda config_name, end_date=None: recorded.append(
            ("auto_update_market_store", config_name, end_date)
        ),
    )
    monkeypatch.setattr(
        cli,
        "auto_enrich_market_store",
        lambda config_name, end_date=None, lookback_days=120, force=False: recorded.append(
            ("auto_enrich_market_store", config_name, end_date, lookback_days, force)
        ),
    )
    monkeypatch.setattr(cli.os, "chdir", lambda path: recorded.append(("chdir", Path(path))))

    def fake_run(cmd, check=True, **kwargs):
        recorded.append(("subprocess.run", cmd, check))
        return 0

    monkeypatch.setattr(cli.subprocess, "run", fake_run)

    cli.live(
        mock=False,
        config_name="smoke_fullstack",
        host="0.0.0.0",
        port=8765,
        trigger_time="now",
        poll_interval=10,
        clean=False,
        enable_memory=False,
    )

    assert any(entry[0] == "run_data_updater" for entry in recorded)
    assert any(
        entry[:2] == ("auto_update_market_store", "smoke_fullstack")
        for entry in recorded
    )
    assert any(
        entry[0] == "auto_enrich_market_store" and entry[1] == "smoke_fullstack"
        for entry in recorded
    )
    launch = next(entry for entry in recorded if entry[0] == "subprocess.run")
    assert launch[1][:6] == [
        cli.sys.executable,
        "-u",
        "-m",
        "backend.main",
        "--mode",
        "live",
    ]
def test_backtest_runs_full_market_store_prepare_before_start(monkeypatch, tmp_path):
    """`evotraders backtest` must prepare and enrich the market store first."""
    project_root = tmp_path
    recorded = []

    monkeypatch.setattr(cli, "get_project_root", lambda: project_root)
    monkeypatch.setattr(cli, "handle_history_cleanup", lambda config_name, auto_clean=False: None)
    monkeypatch.setattr(
        cli,
        "run_data_updater",
        lambda project_root: recorded.append(("run_data_updater", project_root)),
    )
    monkeypatch.setattr(
        cli,
        "auto_prepare_backtest_market_store",
        lambda config_name, start_date, end_date: recorded.append(
            ("auto_prepare_backtest_market_store", config_name, start_date, end_date)
        ),
    )
    monkeypatch.setattr(
        cli,
        "auto_enrich_market_store",
        lambda config_name, end_date=None, lookback_days=120, force=False: recorded.append(
            ("auto_enrich_market_store", config_name, end_date, lookback_days, force)
        ),
    )
    monkeypatch.setattr(cli.os, "chdir", lambda path: recorded.append(("chdir", Path(path))))

    def fake_run(cmd, check=True, **kwargs):
        recorded.append(("subprocess.run", cmd, check))
        return 0

    monkeypatch.setattr(cli.subprocess, "run", fake_run)

    cli.backtest(
        start="2026-03-01",
        end="2026-03-10",
        config_name="smoke_fullstack",
        host="0.0.0.0",
        port=8765,
        poll_interval=10,
        clean=False,
        enable_memory=False,
    )

    assert any(entry[0] == "run_data_updater" for entry in recorded)
    assert any(
        entry
        == ("auto_prepare_backtest_market_store", "smoke_fullstack", "2026-03-01", "2026-03-10")
        for entry in recorded
    )
    assert any(
        entry[0] == "auto_enrich_market_store"
        and entry[1] == "smoke_fullstack"
        and entry[2] == "2026-03-10"
        for entry in recorded
    )
    launch = next(entry for entry in recorded if entry[0] == "subprocess.run")
    assert launch[1][:6] == [
        cli.sys.executable,
        "-u",
        "-m",
        "backend.main",
        "--mode",
        "backtest",
    ]
def test_ingest_enrich_runs_batch_enrichment(monkeypatch):
    """`evotraders ingest enrich` forwards resolved symbols to enrich_symbols."""
    recorded = []
    monkeypatch.setattr(cli, "_resolve_symbols", lambda raw_tickers, config_name=None: ["AAPL", "MSFT"])

    class DummyStore:
        pass

    monkeypatch.setattr(cli, "MarketStore", lambda: DummyStore())

    def fake_enrich(
        store,
        symbols,
        start_date=None,
        end_date=None,
        limit=200,
        analysis_source="local",
        skip_existing=True,
    ):
        recorded.append(
            ("enrich_symbols", symbols, start_date, end_date, limit, analysis_source, skip_existing)
        )
        return [
            {
                "symbol": symbol,
                "news_count": 3,
                "queued_count": 3,
                "analyzed": 3,
                "skipped_existing_count": 0,
                "deduped_count": 0,
                "llm_count": 0,
                "local_count": 3,
            }
            for symbol in symbols
        ]

    monkeypatch.setattr(cli, "enrich_symbols", fake_enrich)

    cli.ingest_enrich(
        tickers=None,
        start="2026-03-01",
        end="2026-03-10",
        limit=150,
        force=False,
        config_name="smoke_fullstack",
    )

    assert recorded == [
        ("enrich_symbols", ["AAPL", "MSFT"], "2026-03-01", "2026-03-10", 150, "local", True)
    ]
def test_ingest_report_reads_market_store_report(monkeypatch):
    """`evotraders ingest report` renders MarketStore coverage with a caption."""
    seen = []
    rendered = []
    monkeypatch.setattr(cli, "_resolve_symbols", lambda raw_tickers, config_name=None: ["AAPL"])

    class DummyStore:
        def get_enrich_report(self, symbols=None, start_date=None, end_date=None):
            seen.append(("get_enrich_report", symbols, start_date, end_date))
            return [
                {
                    "symbol": "AAPL",
                    "raw_news_count": 10,
                    "analyzed_news_count": 8,
                    "coverage_pct": 80.0,
                    "llm_count": 5,
                    "local_count": 3,
                    "latest_trade_date": "2026-03-16",
                    "latest_analysis_at": "2026-03-16T09:00:00",
                }
            ]

    monkeypatch.setattr(cli, "MarketStore", lambda: DummyStore())
    monkeypatch.setattr(
        cli,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )
    monkeypatch.setattr(cli, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(cli.console, "print", lambda value: rendered.append(value))

    cli.ingest_report(
        tickers=None,
        start="2026-03-01",
        end="2026-03-16",
        config_name="smoke_fullstack",
        only_problematic=False,
    )

    assert seen == [("get_enrich_report", ["AAPL"], "2026-03-01", "2026-03-16")]
    assert rendered
    assert getattr(rendered[0], "caption", "") == "Explain LLM: DASHSCOPE:qwen-max"
def test_filter_problematic_report_rows_keeps_low_coverage_and_no_llm():
    """Rows with full coverage and at least one LLM analysis are healthy and get dropped."""
    healthy = {"symbol": "AAPL", "coverage_pct": 100.0, "llm_count": 2}
    low_coverage = {"symbol": "MSFT", "coverage_pct": 80.0, "llm_count": 1}
    no_llm = {"symbol": "NVDA", "coverage_pct": 100.0, "llm_count": 0}
    filtered = cli._filter_problematic_report_rows([healthy, low_coverage, no_llm])
    assert [row["symbol"] for row in filtered] == ["MSFT", "NVDA"]

View File

@@ -0,0 +1,384 @@
# -*- coding: utf-8 -*-
import json
from types import SimpleNamespace
import pytest
from backend.services.gateway import Gateway
import backend.services.gateway as gateway_module
class DummyWebSocket:
    """Websocket stand-in that decodes every sent JSON payload into ``messages``."""

    def __init__(self):
        # Parsed messages, in send order, for assertions.
        self.messages = []

    async def send(self, payload: str):
        decoded = json.loads(payload)
        self.messages.append(decoded)
class DummyStateSync:
    """Minimal StateSync double: fixed current date, records system messages."""

    def __init__(self, current_date="2026-03-16"):
        self.state = {"current_date": current_date}
        self.system_messages = []

    def set_broadcast_fn(self, _fn):
        # Broadcasting is irrelevant to these tests; accept and ignore.
        return None

    def update_state(self, *_args, **_kwargs):
        # State mutation is ignored; tests only read the initial state.
        return None

    async def on_system_message(self, message):
        self.system_messages.append(message)
class FakeMarketStore:
    """MarketStore double: logs every call in ``self.calls`` and returns canned rows.

    Tests assert on the exact (method_name, *args) tuples recorded here, so the
    tuple shapes below are part of this double's contract.
    """
    def __init__(self):
        # Call log of (method_name, *args) tuples, in call order.
        self.calls = []
    def get_news_timeline_enriched(self, symbol, *, start_date=None, end_date=None):
        self.calls.append(("get_news_timeline_enriched", symbol, start_date, end_date))
        return [{"date": end_date, "count": 2, "source_count": 1, "top_title": "Top", "positive_count": 1}]
    def get_news_items(self, symbol, *, start_date=None, end_date=None, limit=100):
        self.calls.append(("get_news_items", symbol, start_date, end_date, limit))
        return [
            {
                "id": "news-1",
                "ticker": symbol,
                "date": end_date,
                "trade_date": end_date,
                "title": "Title",
                "summary": "Summary",
                "source": "polygon",
            }
        ]
    def get_news_items_enriched(self, symbol, *, start_date=None, end_date=None, trade_date=None, limit=100):
        self.calls.append(("get_news_items_enriched", symbol, start_date, end_date, trade_date, limit))
        # Prefer the explicit trade_date lookup; otherwise fall back to the range end.
        target_date = trade_date or end_date
        return [
            {
                "id": "news-1",
                "ticker": symbol,
                "date": target_date,
                "trade_date": target_date,
                "title": "Title",
                "summary": "Summary",
                "source": "polygon",
                "sentiment": "negative",
                "relevance": "high",
                "key_discussion": "Key discussion",
            }
        ]
    def get_news_by_ids_enriched(self, symbol, article_ids):
        self.calls.append(("get_news_by_ids_enriched", symbol, list(article_ids)))
        return [{"id": article_ids[0], "ticker": symbol, "date": "2026-03-16", "sentiment": "negative"}]
    def get_news_categories_enriched(self, symbol, *, start_date=None, end_date=None, limit=200):
        self.calls.append(("get_news_categories_enriched", symbol, start_date, end_date, limit))
        return {"macro": {"label": "宏观", "count": 1, "article_ids": ["news-1"], "positive_ids": [], "negative_ids": ["news-1"], "neutral_ids": []}}
    def get_story_cache(self, symbol, *, as_of_date):
        self.calls.append(("get_story_cache", symbol, as_of_date))
        # No cached story: forces callers down the rebuild path.
        return None
    def upsert_story_cache(self, symbol, *, as_of_date, content, source="local"):
        # Content itself is not recorded; only the cache-key arguments matter to tests.
        self.calls.append(("upsert_story_cache", symbol, as_of_date, source))
    def delete_story_cache(self, symbol, *, as_of_date=None):
        self.calls.append(("delete_story_cache", symbol, as_of_date))
        return 1
    def get_similar_day_cache(self, symbol, *, target_date):
        self.calls.append(("get_similar_day_cache", symbol, target_date))
        # No cached payload: forces a fresh similar-day computation.
        return None
    def upsert_similar_day_cache(self, symbol, *, target_date, payload, source="local"):
        self.calls.append(("upsert_similar_day_cache", symbol, target_date, source))
    def delete_similar_day_cache(self, symbol, *, target_date=None):
        self.calls.append(("delete_similar_day_cache", symbol, target_date))
        return 1
    def get_ohlc(self, symbol, start_date, end_date):
        self.calls.append(("get_ohlc", symbol, start_date, end_date))
        return [
            {"date": start_date, "open": 100, "high": 105, "low": 99, "close": 103},
            {"date": end_date, "open": 103, "high": 108, "low": 102, "close": 107},
        ]
def make_gateway(market_store=None):
    """Construct a Gateway wired entirely to in-test doubles.

    Uses a fresh FakeMarketStore unless an explicit store is supplied.
    """
    store = market_store or FakeMarketStore()
    storage_service = SimpleNamespace(market_store=store)
    fake_pipeline = SimpleNamespace(state_sync=None)
    return Gateway(
        market_service=SimpleNamespace(),
        storage_service=storage_service,
        pipeline=fake_pipeline,
        state_sync=DummyStateSync(),
        config={"mode": "live"},
    )
@pytest.mark.asyncio
async def test_handle_get_stock_news_timeline_uses_market_store_symbol_argument():
    """Timeline handler queries the store over the lookback window and echoes the ticker."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    await gateway._handle_get_stock_news_timeline(
        websocket,
        {"ticker": "AAPL", "lookback_days": 30},
    )
    # DummyStateSync pins the current date to 2026-03-16; 30 days earlier is 2026-02-14.
    assert market_store.calls == [
        ("get_news_timeline_enriched", "AAPL", "2026-02-14", "2026-03-16")
    ]
    assert websocket.messages[-1]["type"] == "stock_news_timeline_loaded"
    assert websocket.messages[-1]["ticker"] == "AAPL"
@pytest.mark.asyncio
async def test_handle_get_stock_news_categories_uses_market_store_symbol_argument(monkeypatch):
    """Categories handler loads enriched items first, then the category buckets."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    await gateway._handle_get_stock_news_categories(
        websocket,
        {"ticker": "AAPL", "lookback_days": 30},
    )
    # Both store calls share the same 30-day window ending at the doubles' current date.
    assert market_store.calls == [
        ("get_news_items_enriched", "AAPL", "2026-02-14", "2026-03-16", None, 200),
        ("get_news_categories_enriched", "AAPL", "2026-02-14", "2026-03-16", 200)
    ]
    assert websocket.messages[-1]["type"] == "stock_news_categories_loaded"
    assert websocket.messages[-1]["categories"]["macro"]["count"] == 1
@pytest.mark.asyncio
async def test_handle_get_stock_range_explain_uses_market_store_rows(monkeypatch):
    """Range-explain handler feeds store rows into build_range_explanation and
    forwards its result unchanged over the websocket."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    def fake_build_range_explanation(*, ticker, start_date, end_date, news_rows):
        # Echo the inputs so the reply payload proves what the handler passed in.
        return {
            "ticker": ticker,
            "start_date": start_date,
            "end_date": end_date,
            "news_count": len(news_rows),
        }
    monkeypatch.setattr(
        gateway_module,
        "build_range_explanation",
        fake_build_range_explanation,
    )
    await gateway._handle_get_stock_range_explain(
        websocket,
        {"ticker": "AAPL", "start_date": "2026-03-10", "end_date": "2026-03-16"},
    )
    assert market_store.calls == [
        ("get_news_items_enriched", "AAPL", "2026-03-10", "2026-03-16", None, 100)
    ]
    assert websocket.messages[-1] == {
        "type": "stock_range_explain_loaded",
        "ticker": "AAPL",
        "result": {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "news_count": 1,
        },
    }
@pytest.mark.asyncio
async def test_handle_get_stock_range_explain_uses_article_ids_path(monkeypatch):
    """With explicit article_ids, news must be loaded by id instead of by date range."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    monkeypatch.setattr(
        gateway_module,
        "build_range_explanation",
        lambda **kwargs: {"news_count": len(kwargs["news_rows"])},
    )
    await gateway._handle_get_stock_range_explain(
        websocket,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "article_ids": ["news-99"],
        },
    )
    # Only the id-based lookup runs; the range lookup is bypassed entirely.
    assert market_store.calls == [("get_news_by_ids_enriched", "AAPL", ["news-99"])]
    assert websocket.messages[-1]["result"]["news_count"] == 1
@pytest.mark.asyncio
async def test_handle_get_stock_news_for_date_uses_trade_date_lookup():
    """Date-specific news requests query by trade_date with the caller's limit."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    await gateway._handle_get_stock_news_for_date(
        websocket,
        {"ticker": "AAPL", "date": "2026-03-16", "limit": 10},
    )
    # start/end are None: the lookup is keyed purely on trade_date.
    assert market_store.calls == [
        ("get_news_items_enriched", "AAPL", None, None, "2026-03-16", 10)
    ]
    assert websocket.messages[-1]["type"] == "stock_news_for_date_loaded"
    assert websocket.messages[-1]["date"] == "2026-03-16"
@pytest.mark.asyncio
async def test_handle_get_stock_story_returns_story_payload(monkeypatch):
    """Story handler runs enrichment and replies with a story mentioning the ticker."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    # Stub the enrichment step so no real analysis work happens.
    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 3},
    )
    await gateway._handle_get_stock_story(
        websocket,
        {"ticker": "AAPL", "as_of_date": "2026-03-16"},
    )
    assert websocket.messages[-1]["type"] == "stock_story_loaded"
    assert websocket.messages[-1]["ticker"] == "AAPL"
    assert "AAPL Story" in websocket.messages[-1]["story"]
@pytest.mark.asyncio
async def test_handle_get_stock_similar_days_returns_items(monkeypatch):
    """Similar-days handler replies with a list payload for the requested ticker."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    # Stub the enrichment step so no real analysis work happens.
    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 3},
    )
    await gateway._handle_get_stock_similar_days(
        websocket,
        {"ticker": "AAPL", "date": "2026-03-16", "top_k": 5},
    )
    assert websocket.messages[-1]["type"] == "stock_similar_days_loaded"
    assert websocket.messages[-1]["ticker"] == "AAPL"
    assert isinstance(websocket.messages[-1]["items"], list)
@pytest.mark.asyncio
async def test_handle_run_stock_enrich_rebuilds_caches(monkeypatch):
    """A forced enrich with rebuild flags must drop the story and similar-day caches."""
    market_store = FakeMarketStore()
    gateway = make_gateway(market_store)
    websocket = DummyWebSocket()
    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 2, "queued_count": 2},
    )
    await gateway._handle_run_stock_enrich(
        websocket,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "force": True,
            "rebuild_story": True,
            "rebuild_similar_days": True,
            "story_date": "2026-03-16",
            "target_date": "2026-03-16",
        },
    )
    # Cache invalidation happened for both derived artifacts.
    assert ("delete_story_cache", "AAPL", "2026-03-16") in market_store.calls
    assert ("delete_similar_day_cache", "AAPL", "2026-03-16") in market_store.calls
    assert websocket.messages[-1]["type"] == "stock_enrich_completed"
    assert websocket.messages[-1]["stats"]["analyzed"] == 2
@pytest.mark.asyncio
async def test_handle_run_stock_enrich_rejects_local_to_llm_without_llm(monkeypatch):
    """Upgrading local analyses to LLM must fail fast when LLM enrichment is disabled."""
    gateway = make_gateway(FakeMarketStore())
    websocket = DummyWebSocket()
    monkeypatch.setattr(gateway_module, "llm_enrichment_enabled", lambda: False)
    await gateway._handle_run_stock_enrich(
        websocket,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "only_local_to_llm": True,
        },
    )
    # The reply still completes, but carries the configuration error.
    assert websocket.messages[-1]["type"] == "stock_enrich_completed"
    assert "requires EXPLAIN_ENRICH_USE_LLM=true" in websocket.messages[-1]["error"]
def test_schedule_watchlist_market_store_refresh_creates_task(monkeypatch):
    """Scheduling should create an asyncio task wrapping the watchlist-refresh coroutine."""
    gateway = make_gateway()
    captured = {}
    class DummyTask:
        # Pretends to be a live task; done()/cancel() mimic the asyncio.Task surface.
        def done(self):
            return False
        def cancel(self):
            captured["cancelled"] = True
    def fake_create_task(coro):
        # Record which coroutine function was scheduled, then close it to
        # avoid an "coroutine was never awaited" warning.
        captured["coro_name"] = coro.cr_code.co_name
        coro.close()
        return DummyTask()
    monkeypatch.setattr(gateway_module.asyncio, "create_task", fake_create_task)
    gateway._schedule_watchlist_market_store_refresh(["AAPL", "MSFT"])
    assert captured["coro_name"] == "_refresh_market_store_for_watchlist"
@pytest.mark.asyncio
async def test_refresh_market_store_for_watchlist_emits_system_messages(monkeypatch):
    """Refreshing should announce the start and then the per-symbol ingest results."""
    gateway = make_gateway()
    # Stub ingest_symbols with fixed per-symbol stats.
    monkeypatch.setattr(
        gateway_module,
        "ingest_symbols",
        lambda symbols, mode="incremental": [
            {"symbol": symbol, "prices": 3, "news": 4, "aligned": 4}
            for symbol in symbols
        ],
    )
    await gateway._refresh_market_store_for_watchlist(["AAPL", "MSFT"])
    assert gateway.state_sync.system_messages[0] == "正在同步自选股市场数据: AAPL, MSFT"
    assert "自选股市场数据已同步:" in gateway.state_sync.system_messages[1]
    assert "AAPL prices=3 news=4" in gateway.state_sync.system_messages[1]

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
from unittest.mock import patch
import pandas as pd
from backend.data.historical_price_manager import HistoricalPriceManager
def test_preload_data_prefers_market_db():
    """When the market DB returns OHLC rows, the CSV fallback must not be touched."""
    manager = HistoricalPriceManager()
    manager.subscribe(["AAPL"])
    market_rows = [
        {
            "symbol": "AAPL",
            "date": "2026-03-09",
            "open": 100.0,
            "high": 103.0,
            "low": 99.0,
            "close": 102.0,
            "volume": 10_000,
            "vwap": 101.0,
            "transactions": 500,
            "source": "polygon",
        }
    ]
    with (
        patch.object(manager._market_store, "get_ohlc", return_value=market_rows),
        patch.object(manager._router, "load_local_price_frame") as load_csv,
    ):
        manager.preload_data("2026-03-01", "2026-03-10")
    # The CSV path is bypassed and the DB row ends up in the price cache.
    load_csv.assert_not_called()
    assert "AAPL" in manager._price_cache
    assert float(manager._price_cache["AAPL"].iloc[0]["close"]) == 102.0
def test_preload_data_falls_back_to_csv():
    """With no market-DB rows, preload must load the local CSV frame instead."""
    manager = HistoricalPriceManager()
    manager.subscribe(["MSFT"])
    csv_df = pd.DataFrame(
        {
            "time": ["2026-03-09"],
            "open": [200.0],
            "high": [205.0],
            "low": [198.0],
            "close": [204.0],
            "volume": [20_000],
        }
    )
    # Mirror the CSV loader's expected frame shape: a datetime "Date" index.
    csv_df["time"] = pd.to_datetime(csv_df["time"])
    csv_df["Date"] = csv_df["time"]
    csv_df.set_index("Date", inplace=True)
    with (
        patch.object(manager._market_store, "get_ohlc", return_value=[]),
        patch.object(manager._router, "load_local_price_frame", return_value=csv_df) as load_csv,
    ):
        manager.preload_data("2026-03-01", "2026-03-10")
    load_csv.assert_called_once_with("MSFT")
    assert "MSFT" in manager._price_cache
    assert float(manager._price_cache["MSFT"].iloc[0]["close"]) == 204.0

View File

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
from backend.enrich import llm_enricher
class DummyResponse:
    """Model-response stub exposing only the ``metadata`` attribute the enricher reads."""
    def __init__(self, metadata):
        self.metadata = metadata
class DummyModel:
    """Async model double: logs each invocation and echoes fixed metadata."""

    def __init__(self, metadata):
        self.metadata = metadata
        # One dict per invocation, capturing the full call signature.
        self.calls = []

    async def __call__(self, messages, structured_model=None, **kwargs):
        record = {
            "messages": messages,
            "structured_model": structured_model,
            "kwargs": kwargs,
        }
        self.calls.append(record)
        return DummyResponse(self.metadata)
def test_analyze_news_row_with_llm_uses_agentscope_model(monkeypatch):
    """Single-row analysis must call the explain model with the EnrichedNewsItem
    structured schema and stamp the model label into raw_json."""
    model = DummyModel(
        {
            "id": "news-1",
            "relevance": "high",
            "sentiment": "positive",
            "key_discussion": "Demand remains resilient",
            "summary": "Structured summary",
            "reason_growth": "Orders improved",
            "reason_decrease": "",
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )
    result = llm_enricher.analyze_news_row_with_llm(
        {
            "id": "news-1",
            "title": "Apple expands AI features",
            "summary": "New devices and software updates were announced.",
        }
    )
    assert result["sentiment"] == "positive"
    assert result["summary"] == "Structured summary"
    assert result["raw_json"]["model_label"] == "DASHSCOPE:qwen-max"
    assert model.calls
    # The structured-output schema must be the single-item model.
    assert model.calls[0]["structured_model"] is llm_enricher.EnrichedNewsItem
def test_analyze_news_rows_with_llm_uses_agentscope_structured_batch(monkeypatch):
    """Batch analysis must use the EnrichedNewsBatch schema and key results by news id."""
    model = DummyModel(
        {
            "items": [
                {
                    "id": "news-1",
                    "relevance": "high",
                    "sentiment": "negative",
                    "key_discussion": "Margin pressure",
                    "summary": "Batch summary",
                    "reason_growth": "",
                    "reason_decrease": "Costs rose",
                }
            ]
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )
    result = llm_enricher.analyze_news_rows_with_llm(
        [
            {
                "id": "news-1",
                "title": "Apple margins pressured",
                "summary": "Costs increased this quarter.",
            }
        ]
    )
    # The batch result is a mapping keyed by news id.
    assert result["news-1"]["sentiment"] == "negative"
    assert result["news-1"]["reason_decrease"] == "Costs rose"
    assert result["news-1"]["raw_json"]["model_label"] == "DASHSCOPE:qwen-max"
    assert model.calls
    assert model.calls[0]["structured_model"] is llm_enricher.EnrichedNewsBatch
def test_analyze_range_with_llm_uses_agentscope_structured_output(monkeypatch):
    """Range analysis must use the RangeAnalysisPayload schema and stamp the model label."""
    model = DummyModel(
        {
            "summary": "该股在区间内震荡下行,相关新闻主要集中在盈利预期和供应链扰动。",
            "trend_analysis": "前半段受利空新闻压制,后半段跌幅收敛。",
            "bullish_factors": ["估值消化后出现部分承接"],
            "bearish_factors": ["盈利预期下修", "供应链扰动持续"],
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_range_analysis_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )
    result = llm_enricher.analyze_range_with_llm(
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "price_change_pct": -3.42,
        }
    )
    assert result["summary"].startswith("该股在区间内震荡下行")
    assert result["model_label"] == "DASHSCOPE:qwen-max"
    assert result["bearish_factors"] == ["盈利预期下修", "供应链扰动持续"]
    assert model.calls
    assert model.calls[0]["structured_model"] is llm_enricher.RangeAnalysisPayload

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
from pathlib import Path
from backend.data.market_store import MarketStore
def test_get_enrich_report_summarizes_coverage(tmp_path: Path):
    """The enrich report should count raw vs analyzed news and compute coverage_pct."""
    store = MarketStore(tmp_path / "market_research.db")
    # Two raw articles for AAPL ...
    store.upsert_news(
        "AAPL",
        [
            {
                "id": "news-1",
                "published_utc": "2026-03-10T12:00:00Z",
                "title": "Apple earnings beat",
                "summary": "Revenue topped expectations",
                "tickers": ["AAPL"],
            },
            {
                "id": "news-2",
                "published_utc": "2026-03-11T12:00:00Z",
                "title": "Apple supply chain warning",
                "summary": "Outlook softened",
                "tickers": ["AAPL"],
            },
        ],
    )
    store.set_trade_dates(
        [
            {"news_id": "news-1", "symbol": "AAPL", "trade_date": "2026-03-10"},
            {"news_id": "news-2", "symbol": "AAPL", "trade_date": "2026-03-11"},
        ]
    )
    # ... but only one carries an LLM analysis row.
    store.upsert_news_analysis(
        "AAPL",
        [
            {
                "news_id": "news-1",
                "trade_date": "2026-03-10",
                "summary": "LLM enriched",
                "analysis_source": "llm",
            }
        ],
        analysis_source="llm",
    )
    rows = store.get_enrich_report(["AAPL"])
    assert len(rows) == 1
    assert rows[0]["symbol"] == "AAPL"
    assert rows[0]["raw_news_count"] == 2
    assert rows[0]["analyzed_news_count"] == 1
    # 1 of 2 articles analyzed -> 50% coverage.
    assert rows[0]["coverage_pct"] == 50.0
    assert rows[0]["llm_count"] == 1

View File

@@ -0,0 +1,174 @@
# -*- coding: utf-8 -*-
from backend.enrich import news_enricher
def test_classify_news_row_falls_back_to_local_rules(monkeypatch):
    """Without an LLM result, classification must use local rules and tag the source."""
    monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)
    row = {
        "title": "Apple shares drop after weak guidance",
        "summary": "Investors reacted negatively to softer-than-expected outlook.",
    }
    result = news_enricher.classify_news_row(row)
    assert result["analysis_source"] == "local"
    assert result["sentiment"] == "negative"
    assert result["summary"]
def test_classify_news_row_prefers_llm_when_available(monkeypatch):
    """An available LLM result must override local rules and be tagged as source 'llm'."""
    monkeypatch.setattr(
        news_enricher,
        "analyze_news_row_with_llm",
        lambda row: {
            "relevance": "high",
            "sentiment": "positive",
            "key_discussion": "Demand resilience",
            "summary": "LLM summary",
            "reason_growth": "Orders remain strong",
            "reason_decrease": "",
            "raw_json": {"provider": "llm"},
        },
    )
    result = news_enricher.classify_news_row(
        {
            "title": "Apple expands AI features",
            "summary": "New devices and software updates were announced.",
        }
    )
    assert result["analysis_source"] == "llm"
    assert result["sentiment"] == "positive"
    assert result["summary"] == "LLM summary"
def test_build_analysis_rows_prefers_batch_llm_and_dedupes(monkeypatch):
    """Batch LLM output wins over per-row analysis, and duplicate title/summary
    pairs are collapsed into a single analysis row.

    Fix: unpack the (rows, stats) tuple directly instead of binding it to
    ``rows`` first and re-unpacking (``rows = f(...); rows, stats = rows``),
    which shadowed the name with two different types.
    """
    monkeypatch.setattr(news_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(news_enricher, "get_env_int", lambda key, default=0: 8)
    monkeypatch.setattr(
        news_enricher,
        "analyze_news_rows_with_llm",
        lambda rows: {
            "news-1": {
                "relevance": "high",
                "sentiment": "positive",
                "key_discussion": "Batch result",
                "summary": "Batch summary",
                "reason_growth": "Growth",
                "reason_decrease": "",
                "raw_json": {"provider": "batch"},
            }
        },
    )
    # Per-row fallback must never be needed when the batch path succeeds.
    monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)
    rows, stats = news_enricher.build_analysis_rows(
        symbol="AAPL",
        news_rows=[
            {"id": "news-1", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
            {"id": "news-2", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
        ],
        ohlc_rows=[],
    )
    assert len(rows) == 1
    assert rows[0]["analysis_source"] == "llm"
    assert rows[0]["summary"] == "Batch summary"
    assert stats["deduped_count"] == 1
    assert stats["llm_count"] == 1
def test_enrich_news_for_symbol_skips_existing(monkeypatch):
    """Already-analyzed news ids are skipped; only the remaining rows get upserted."""
    class DummyStore:
        def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
            return [
                {"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
                {"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
            ]
        def get_analyzed_news_ids(self, symbol, start_date=None, end_date=None):
            # news-1 is already analyzed -> must be filtered out of the queue.
            return {"news-1"}
        def get_ohlc(self, symbol, start_date, end_date):
            return []
        def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
            # Capture the upserted rows for the final assertion.
            self.rows = rows
            return len(rows)
    monkeypatch.setattr(
        news_enricher,
        "build_analysis_rows",
        lambda symbol, news_rows, ohlc_rows: (
            [
                {
                    "news_id": row["id"],
                    "trade_date": row["trade_date"],
                    "summary": row["summary"],
                    "analysis_source": "local",
                }
                for row in news_rows
            ],
            {"deduped_count": 0, "llm_count": 0, "local_count": len(news_rows)},
        ),
    )
    store = DummyStore()
    result = news_enricher.enrich_news_for_symbol(store, "AAPL")
    assert result["news_count"] == 2
    assert result["queued_count"] == 1
    assert result["skipped_existing_count"] == 1
    assert len(store.rows) == 1
    assert store.rows[0]["news_id"] == "news-2"
def test_enrich_news_for_symbol_only_reanalyzes_local(monkeypatch):
    """only_reanalyze_local should upgrade local-source rows only, skipping llm
    and never-analyzed rows, and report upgrade statistics."""
    class DummyStore:
        def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
            return [
                {"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
                {"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
                {"id": "news-3", "trade_date": "2026-03-12", "title": "Three", "summary": "Three"},
            ]
        def get_analyzed_news_sources(self, symbol, start_date=None, end_date=None):
            # news-1 was local (upgrade candidate), news-2 already llm, news-3 never analyzed.
            return {"news-1": "local", "news-2": "llm"}
        def get_ohlc(self, symbol, start_date, end_date):
            return []
        def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
            # Capture the upserted rows for the final assertion.
            self.rows = rows
            return len(rows)
    monkeypatch.setattr(
        news_enricher,
        "build_analysis_rows",
        lambda symbol, news_rows, ohlc_rows: (
            [
                {
                    "news_id": row["id"],
                    "trade_date": row["trade_date"],
                    "summary": row["summary"],
                    "analysis_source": "llm" if row["id"] == "news-1" else "local",
                }
                for row in news_rows
            ],
            {"deduped_count": 0, "llm_count": 1, "local_count": 0},
        ),
    )
    store = DummyStore()
    result = news_enricher.enrich_news_for_symbol(
        store,
        "AAPL",
        only_reanalyze_local=True,
    )
    assert result["news_count"] == 3
    assert result["queued_count"] == 1
    assert result["skipped_existing_count"] == 2
    assert result["only_reanalyze_local"] is True
    assert result["upgraded_local_to_llm_count"] == 1
    assert result["execution_summary"]["upgraded_dates"] == ["2026-03-10"]
    assert result["execution_summary"]["remaining_local_titles"] == []
    assert result["execution_summary"]["skipped_missing_analysis_count"] == 1
    assert result["execution_summary"]["skipped_non_local_count"] == 1
    assert [row["news_id"] for row in store.rows] == ["news-1"]

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
from types import SimpleNamespace
from backend.explain import range_explainer
def test_build_range_explanation_prefers_llm_text_when_available(monkeypatch):
    """When the LLM range analysis succeeds, its text, factors and label are surfaced."""
    # Three synthetic trading days; only open/close/high/low/volume are read.
    monkeypatch.setattr(
        range_explainer,
        "get_prices",
        lambda ticker, start_date, end_date: [
            SimpleNamespace(open=100, close=98, high=102, low=97, volume=1000),
            SimpleNamespace(open=98, close=96, high=99, low=95, volume=1100),
            SimpleNamespace(open=96, close=97, high=98, low=94, volume=1200),
        ],
    )
    monkeypatch.setattr(
        range_explainer,
        "analyze_range_with_llm",
        lambda payload: {
            "summary": "区间内整体偏弱,主题集中在盈利预期和供应链风险。",
            "trend_analysis": "前半段快速下探,后半段出现修复。",
            "bullish_factors": ["回调后出现承接"],
            "bearish_factors": ["盈利预期承压"],
            "model_label": "DASHSCOPE:qwen-max",
        },
    )
    result = range_explainer.build_range_explanation(
        ticker="AAPL",
        start_date="2026-03-10",
        end_date="2026-03-16",
        news_rows=[
            {
                "id": "news-1",
                "trade_date": "2026-03-10",
                "title": "Apple margin pressure concerns grow",
                "summary": "Investors focused on weaker margin outlook.",
                "sentiment": "negative",
                "relevance": "high",
                "ret_t0": -0.02,
                "reason_decrease": "盈利预期承压",
                "category": "earnings",
            }
        ],
    )
    assert result["analysis"]["summary"] == "区间内整体偏弱,主题集中在盈利预期和供应链风险。"
    assert result["analysis"]["trend_analysis"] == "前半段快速下探,后半段出现修复。"
    assert result["analysis"]["bullish_factors"] == ["回调后出现承接"]
    assert result["analysis"]["analysis_source"] == "llm"
    assert result["analysis"]["analysis_model_label"] == "DASHSCOPE:qwen-max"
    assert result["news_count"] == 1
View File

@@ -13,11 +13,17 @@ FIN_DATA_SOURCE = # Preferred source: finnhub / financial_datasets / yfinance /
ENABLED_DATA_SOURCES = # Optional allowlist, comma-separated, e.g. financial_datasets,finnhub,yfinance,local_csv | 可启用数据源列表
FINANCIAL_DATASETS_API_KEY= #required | 必填
FINNHUB_API_KEY= #optional | 可选
POLYGON_API_KEY= #optional for market warehouse ingest | Polygon长期市场库采集可选
MARKET_DB_PATH= #optional path for long-lived market_research.db | 长期市场数据库路径可选
# Model API
OPENAI_API_KEY=
OPENAI_BASE_URL=
MODEL_NAME=qwen3-max-preview
EXPLAIN_ENRICH_USE_LLM=false
EXPLAIN_ENRICH_MODEL_PROVIDER=
EXPLAIN_ENRICH_MODEL_NAME=
EXPLAIN_RANGE_USE_LLM=
#记忆模块Embedding and llm calls for Reme memory)
# default to use aliyun dashscope url, more details: https://help.aliyun.com/zh/model-studio/what-is-model-studio

View File

@@ -7,6 +7,8 @@
"dev": "vite",
"build": "vite build",
"lint": "eslint .",
"test": "vitest run",
"test:watch": "vitest",
"preview": "vite preview",
"preview:host": "vite preview --host"
},
@@ -37,6 +39,7 @@
},
"devDependencies": {
"@eslint/js": "^9.33.0",
"@testing-library/react": "^16.3.2",
"@types/react": "^19.1.13",
"@types/react-dom": "^19.1.9",
"@vitejs/plugin-react": "^5.0.0",
@@ -45,11 +48,13 @@
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0",
"jsdom": "^29.0.0",
"postcss": "^8.5.6",
"tailwindcss": "^3.4.17",
"tailwindcss-animate": "^1.0.7",
"typescript": "^5.9.2",
"vite": "^7.1.2",
"vite-tsconfig-paths": "^5.1.4"
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^4.1.0"
}
}

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useMemo, useRef, useState, useCallback } from "react";
import React, { Suspense, lazy, useEffect, useMemo, useRef, useState, useCallback } from "react";
// Configuration and constants
import { AGENTS, INITIAL_TICKERS } from './config/constants';
@@ -13,19 +13,40 @@ import { useFeedProcessor } from './hooks/useFeedProcessor';
import GlobalStyles from './styles/GlobalStyles';
// Components
import RoomView from './components/RoomView';
import NetValueChart from './components/NetValueChart';
import AgentFeed from './components/AgentFeed';
import StockLogo from './components/StockLogo';
import StatisticsView from './components/StatisticsView';
import PerformanceView from './components/PerformanceView';
import StockExplainView from './components/StockExplainView.jsx';
import Header from './components/Header.jsx';
import WatchlistPanel from './components/WatchlistPanel.jsx';
// Utils
import { formatNumber, formatTickerPrice } from './utils/formatters';
const RoomView = lazy(() => import('./components/RoomView'));
const AgentFeed = lazy(() => import('./components/AgentFeed'));
const StatisticsView = lazy(() => import('./components/StatisticsView'));
const StockExplainView = lazy(() => import('./components/StockExplainView.jsx'));
// Minimal placeholder rendered while a lazily-loaded view chunk is fetched.
// Styled inline (black border on white, bold small text) to match the app's
// monochrome terminal look without pulling in extra CSS.
function ViewLoadingFallback({ label = '加载中...' }) {
  return (
    <div
      style={{
        minHeight: 240,
        height: '100%',
        display: 'flex',
        alignItems: 'center',
        justifyContent: 'center',
        border: '1px solid #000000',
        background: '#ffffff',
        fontSize: 12,
        fontWeight: 700,
        letterSpacing: 0.4
      }}
    >
      {label}
    </div>
  );
}
/**
* Live Trading Intelligence Platform - Read-Only Dashboard
* Geek Style - Terminal-inspired, minimal, monochrome
@@ -73,6 +94,7 @@ export default function LiveTradingApp() {
const [priceHistoryByTicker, setPriceHistoryByTicker] = useState({});
const [ohlcHistoryByTicker, setOhlcHistoryByTicker] = useState({});
const [explainEventsByTicker, setExplainEventsByTicker] = useState({});
const [newsByTicker, setNewsByTicker] = useState({});
const [selectedExplainSymbol, setSelectedExplainSymbol] = useState('');
const [historySourceByTicker, setHistorySourceByTicker] = useState({});
@@ -237,35 +259,59 @@ export default function LiveTradingApp() {
const raw = typeof marketStatus.status_text === 'string' ? marketStatus.status_text.trim() : '';
const normalized = raw.toLowerCase();
if (normalized === 'market closed (non-trading day)') {
return '休市';
const byStatus = {
open: '开盘',
closed: '休市',
premarket: '盘前',
afterhours: '盘后',
};
const byText = {
'market closed (non-trading day)': '休市',
'market open': '开盘',
'market closed': '收盘',
'pre-market': '盘前',
'after-hours': '盘后',
'after hours': '盘后',
'backtest mode': '回测模式',
};
if (normalized && byText[normalized]) {
return byText[normalized];
}
if (normalized === 'market open') {
return '开盘';
if (marketStatus.status && byStatus[marketStatus.status]) {
return byStatus[marketStatus.status];
}
if (normalized === 'market closed') {
return '收盘';
}
return raw || (marketStatus.status === 'open' ? '开盘' : '收盘');
return raw || '状态未知';
}, [marketStatus]);
const priceSourceLabel = useMemo(() => {
const providerLabelMap = useMemo(() => ({
yfinance: 'YFinance',
finnhub: 'Finnhub',
financial_datasets: 'Financial Datasets',
local_csv: 'CSV',
polygon: 'Polygon',
mock: 'Mock',
backtest: 'Backtest'
}), []);
const livePriceSourceLabel = useMemo(() => {
const source = marketStatus?.live_quote_provider;
if (!source) {
return null;
}
const normalized = String(source).trim().toLowerCase();
return `实时 ${providerLabelMap[normalized] || String(source).trim()}`;
}, [marketStatus, providerLabelMap]);
const historicalPriceSourceLabel = useMemo(() => {
const source = dataSources?.last_success?.prices;
if (!source) {
return null;
}
const normalized = String(source).trim().toLowerCase();
const labels = {
yfinance: '数据源 Yahoo',
finnhub: '数据源 Finnhub',
financial_datasets: '数据源 Financial Datasets',
local_csv: '数据源 CSV'
};
return labels[normalized] || `数据源 ${String(source).trim()}`;
}, [dataSources]);
return `历史 ${providerLabelMap[normalized] || String(source).trim()}`;
}, [dataSources, providerLabelMap]);
const parseWatchlistInput = useCallback((value) => {
if (typeof value !== 'string') {
@@ -413,6 +459,131 @@ export default function LiveTradingApp() {
});
}, []);
// Request recent news for a symbol (45-day lookback, up to 12 items).
// Returns false when the symbol is blank or the websocket client is absent;
// otherwise returns the client's send() result.
const requestStockNews = useCallback((symbol) => {
  const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
  if (!normalized || !clientRef.current) {
    return false;
  }
  return clientRef.current.send({
    type: 'get_stock_news',
    ticker: normalized,
    lookback_days: 45,
    limit: 12
  });
}, []);
// Request the news items attached to one specific trade date (up to 20 items).
// Returns false when the symbol/date is missing or the client is absent.
const requestStockNewsForDate = useCallback((symbol, date) => {
  const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
  if (!normalized || !date || !clientRef.current) {
    return false;
  }
  return clientRef.current.send({
    type: 'get_stock_news_for_date',
    ticker: normalized,
    date,
    limit: 20
  });
}, []);
// Request the per-day news timeline for a symbol over a 90-day lookback.
const requestStockNewsTimeline = useCallback((symbol) => {
  const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
  if (!normalized || !clientRef.current) {
    return false;
  }
  return clientRef.current.send({
    type: 'get_stock_news_timeline',
    ticker: normalized,
    lookback_days: 90
  });
}, []);
// Request the categorized news buckets for a symbol over a 90-day lookback.
const requestStockNewsCategories = useCallback((symbol) => {
  const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
  if (!normalized || !clientRef.current) {
    return false;
  }
  return clientRef.current.send({
    type: 'get_stock_news_categories',
    ticker: normalized,
    lookback_days: 90
  });
}, []);
const requestStockRangeExplain = useCallback((symbol, startDate, endDate, articleIds = []) => {
const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
if (!normalized || !startDate || !endDate || !clientRef.current) {
return false;
}
return clientRef.current.send({
type: 'get_stock_range_explain',
ticker: normalized,
start_date: startDate,
end_date: endDate,
article_ids: Array.isArray(articleIds) ? articleIds : []
});
}, []);
const requestStockStory = useCallback((symbol, asOfDate = null) => {
const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
if (!normalized || !clientRef.current) {
return false;
}
return clientRef.current.send({
type: 'get_stock_story',
ticker: normalized,
as_of_date: asOfDate
});
}, []);
const requestStockSimilarDays = useCallback((symbol, date, topK = 8) => {
const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
if (!normalized || !date || !clientRef.current) {
return false;
}
return clientRef.current.send({
type: 'get_stock_similar_days',
ticker: normalized,
date,
top_k: topK
});
}, []);
const requestStockEnrich = useCallback((symbol, options = {}) => {
const normalized = typeof symbol === 'string' ? symbol.trim().toUpperCase() : '';
if (!normalized || !clientRef.current) {
return false;
}
const startDate = typeof options.startDate === 'string' ? options.startDate.trim() : '';
const endDate = typeof options.endDate === 'string' ? options.endDate.trim() : '';
if (!startDate || !endDate) {
return false;
}
setNewsByTicker((prev) => ({
...prev,
[normalized]: {
...(prev[normalized] || {}),
maintenanceStatus: {
running: true,
error: null,
updatedAt: new Date().toISOString(),
stats: null
}
}
}));
return clientRef.current.send({
type: 'run_stock_enrich',
ticker: normalized,
start_date: startDate,
end_date: endDate,
force: Boolean(options.force),
only_local_to_llm: Boolean(options.onlyLocalToLlm),
rebuild_story: Boolean(options.rebuildStory),
rebuild_similar_days: Boolean(options.rebuildSimilarDays),
story_date: options.storyDate || null,
target_date: options.targetDate || null
});
}, []);
// Switch away from LIVE tab when market closes
useEffect(() => {
if (!isLiveEnabled && chartTab === 'live') {
@@ -439,7 +610,21 @@ export default function LiveTradingApp() {
}
requestStockHistory(selectedExplainSymbol);
requestStockExplainEvents(selectedExplainSymbol);
}, [currentView, requestStockExplainEvents, requestStockHistory, selectedExplainSymbol]);
requestStockNews(selectedExplainSymbol);
requestStockNewsTimeline(selectedExplainSymbol);
requestStockNewsCategories(selectedExplainSymbol);
requestStockStory(selectedExplainSymbol, currentDate);
}, [
currentDate,
currentView,
requestStockExplainEvents,
requestStockHistory,
requestStockNews,
requestStockNewsCategories,
requestStockNewsTimeline,
requestStockStory,
selectedExplainSymbol
]);
// Clock - use virtual time if available (for mock mode)
useEffect(() => {
@@ -802,6 +987,193 @@ export default function LiveTradingApp() {
}));
},
stock_news_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
if (!symbol) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
items: Array.isArray(e.news) ? e.news : [],
source: e.source || null,
startDate: e.start_date || null,
endDate: e.end_date || null
}
}));
requestStockNewsTimeline(symbol);
},
stock_news_for_date_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
const date = typeof e.date === 'string' ? e.date.trim() : '';
if (!symbol || !date) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
byDate: {
...((prev[symbol] && prev[symbol].byDate) || {}),
[date]: Array.isArray(e.news) ? e.news : []
}
}
}));
},
stock_news_timeline_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
if (!symbol) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
timeline: Array.isArray(e.timeline) ? e.timeline : [],
timelineStartDate: e.start_date || null,
timelineEndDate: e.end_date || null
}
}));
},
stock_news_categories_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
if (!symbol) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
categories: e.categories || {},
categoriesStartDate: e.start_date || null,
categoriesEndDate: e.end_date || null
}
}));
},
stock_range_explain_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
if (!symbol) {
return;
}
const result = e.result && typeof e.result === 'object' ? e.result : null;
if (!result?.start_date || !result?.end_date) {
return;
}
const cacheKey = `${result.start_date}:${result.end_date}`;
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
rangeExplainCache: {
...((prev[symbol] && prev[symbol].rangeExplainCache) || {}),
[cacheKey]: result
}
}
}));
},
stock_story_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
const asOfDate = typeof e.as_of_date === 'string' ? e.as_of_date.trim() : '';
if (!symbol || !asOfDate) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
storyCache: {
...((prev[symbol] && prev[symbol].storyCache) || {}),
[asOfDate]: {
story: e.story || '',
source: e.source || null,
asOfDate
}
}
}
}));
},
stock_similar_days_loaded: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
const date = typeof e.target_date === 'string'
? e.target_date.trim()
: typeof e.date === 'string'
? e.date.trim()
: '';
if (!symbol || !date) {
return;
}
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
similarDaysCache: {
...((prev[symbol] && prev[symbol].similarDaysCache) || {}),
[date]: {
target_features: e.target_features || {},
items: Array.isArray(e.items) ? e.items : [],
error: e.error || null
}
}
}
}));
},
stock_enrich_completed: (e) => {
const symbol = typeof e.ticker === 'string' ? e.ticker.trim().toUpperCase() : '';
if (!symbol) {
return;
}
const completedAt = new Date().toISOString();
const historyEntry = {
timestamp: completedAt,
startDate: e.start_date || '',
endDate: e.end_date || '',
force: Boolean(e.force),
onlyLocalToLlm: Boolean(e.only_local_to_llm),
error: e.error || null,
stats: e.stats || null,
storyStatus: e.story_status || null,
similarStatus: e.similar_status || null
};
setNewsByTicker((prev) => ({
...prev,
[symbol]: {
...(prev[symbol] || {}),
items: [],
byDate: {},
timeline: [],
categories: {},
rangeExplainCache: {},
storyCache: {},
similarDaysCache: {},
maintenanceStatus: {
running: false,
error: e.error || null,
updatedAt: completedAt,
stats: e.stats || null,
storyStatus: e.story_status || null,
similarStatus: e.similar_status || null
},
maintenanceHistory: [
historyEntry,
...(((prev[symbol] && prev[symbol].maintenanceHistory) || []).slice(0, 7))
]
}
}));
if (!e.error) {
requestStockNews(symbol);
requestStockNewsTimeline(symbol);
requestStockNewsCategories(symbol);
}
},
// Real-time price updates
price_update: (e) => {
try {
@@ -1146,7 +1518,14 @@ export default function LiveTradingApp() {
clientRef.current.disconnect();
}
};
}, [addSystemMessage, buildTickersFromSymbols, processFeedEvent, processHistoricalFeed]); // Only reconnect if handlers change
}, [
addSystemMessage,
buildTickersFromSymbols,
processFeedEvent,
processHistoricalFeed,
requestStockNewsCategories,
requestStockNewsTimeline
]); // Only reconnect if handlers change
// Resizing handlers
const handleMouseDown = (e) => {
@@ -1318,16 +1697,24 @@ export default function LiveTradingApp() {
</span>
</>
)}
{priceSourceLabel && (
{livePriceSourceLabel && (
<>
<span className="status-sep">·</span>
<span className="market-text backtest">
{priceSourceLabel}
{livePriceSourceLabel}
</span>
</>
)}
{historicalPriceSourceLabel && (
<>
<span className="status-sep">·</span>
<span className="market-text backtest">
{historicalPriceSourceLabel}
</span>
</>
)}
<span className="status-sep">·</span>
<span className="time-text">{lastUpdate.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false })}</span>
<span className="time-text">{now.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false })}</span>
</div>
<WatchlistPanel
@@ -1407,7 +1794,7 @@ export default function LiveTradingApp() {
className={`view-nav-btn ${currentView === 'explain' ? 'active' : ''}`}
onClick={() => setCurrentView('explain')}
>
个股解释
个股分析
</button>
<button
@@ -1437,6 +1824,7 @@ export default function LiveTradingApp() {
} ${!isInitialAnimating ? 'normal-speed' : ''}`}>
{/* Room View Panel */}
<div className="view-panel">
<Suspense fallback={<ViewLoadingFallback label="加载交易室..." />}>
<RoomView
bubbles={bubbles}
bubbleFor={bubbleFor}
@@ -1444,10 +1832,12 @@ export default function LiveTradingApp() {
feed={feed}
onJumpToMessage={handleJumpToMessage}
/>
</Suspense>
</div>
{/* Stock Explain View Panel */}
<div className="view-panel">
<Suspense fallback={<ViewLoadingFallback label="加载个股分析..." />}>
<StockExplainView
tickers={displayTickers}
holdings={holdings}
@@ -1460,7 +1850,15 @@ export default function LiveTradingApp() {
onSelectedSymbolChange={setSelectedExplainSymbol}
selectedHistorySource={historySourceByTicker[selectedExplainSymbol] || null}
explainEventsSnapshot={explainEventsByTicker[selectedExplainSymbol] || null}
newsSnapshot={newsByTicker[selectedExplainSymbol] || null}
onRequestRangeExplain={requestStockRangeExplain}
onRequestNewsForDate={requestStockNewsForDate}
onRequestStory={requestStockStory}
currentDate={currentDate}
onRequestSimilarDays={requestStockSimilarDays}
onRequestStockEnrich={requestStockEnrich}
/>
</Suspense>
</div>
{/* Chart View Panel */}
@@ -1502,6 +1900,7 @@ export default function LiveTradingApp() {
{/* Statistics View Panel */}
<div className="view-panel">
<Suspense fallback={<ViewLoadingFallback label="加载统计视图..." />}>
<StatisticsView
trades={trades}
holdings={holdings}
@@ -1510,6 +1909,7 @@ export default function LiveTradingApp() {
equity={portfolioData.equity}
leaderboard={leaderboard}
/>
</Suspense>
</div>
</div>
</div>
@@ -1524,7 +1924,9 @@ export default function LiveTradingApp() {
{/* Right Panel: Agent Feed */}
<div className="right-panel" style={{ width: `${100 - leftWidth}%` }}>
<Suspense fallback={<ViewLoadingFallback label="加载消息流..." />}>
<AgentFeed ref={agentFeedRef} feed={feed} leaderboard={leaderboard} />
</Suspense>
</div>
</div>
</>

File diff suppressed because it is too large Load Diff

View File

@@ -20,7 +20,7 @@ export default function WatchlistPanel({
onSave
}) {
return (
<div style={{ display: 'flex', alignItems: 'center', gap: 8, minWidth: 0, position: 'relative' }}>
<div style={{ display: 'flex', alignItems: 'center', gap: 8, minWidth: 0, position: 'relative', marginLeft: -6 }}>
<button
onClick={onToggle}
style={{
@@ -36,7 +36,7 @@ export default function WatchlistPanel({
whiteSpace: 'nowrap'
}}
>
WATCHLIST
自选股
</button>
{isOpen && (

View File

@@ -0,0 +1,157 @@
import React from 'react';
import { formatDateTime } from '../../utils/formatters';
/**
 * Collapsible "key events timeline" panel of the stock-explain view.
 *
 * Renders (when open) three layers of controls over a card grid:
 *   1. a row of date buttons (`availableEventDates`) that drive `onSelectEventDate`;
 *   2. a row of category filter buttons built from `eventCategoryMeta`, showing
 *      per-category counts from `eventCategoryCounts`;
 *   3. the filtered event cards in `visibleExplainEvents`.
 *
 * All filtering/derivation happens in the parent; this component is a pure
 * presentational renderer. `onToggle` flips the open/collapsed state held by
 * the parent. Events carry `tone` ('positive' | 'negative' | other) which only
 * selects the accent dot colour, and `category`, which is looked up in
 * `eventCategoryMeta` (falling back to `eventCategoryMeta.other`).
 */
export default function ExplainEventsSection({
  explainTimeline,
  isOpen,
  onToggle,
  availableEventDates,
  selectedEventDate,
  onSelectEventDate,
  eventCategoryCounts,
  activeEventCategory,
  onSelectEventCategory,
  eventCategoryMeta,
  visibleExplainEvents,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">关键事件时间线</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            图上点击事件点可切换对应日期
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起关键事件' : `展开关键事件 ${explainTimeline.length}`}
          </button>
        </div>
      </div>
      {explainTimeline.length === 0 ? (
        <div className="empty-state">当前还没有可以串起来看的关键事件</div>
      ) : !isOpen ? (
        <div className="empty-state">关键事件默认收起需要时再展开查看和筛选</div>
      ) : (
        <div style={{ display: 'grid', gap: 14 }}>
          {/* Date selector: one button per available event date */}
          <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
            {availableEventDates.map((dateKey) => {
              const isActive = dateKey === selectedEventDate;
              return (
                <button
                  key={dateKey}
                  onClick={() => onSelectEventDate(dateKey)}
                  style={{
                    border: '1px solid #111111',
                    background: isActive ? '#111111' : '#ffffff',
                    color: isActive ? '#ffffff' : '#111111',
                    padding: '7px 10px',
                    fontFamily: 'inherit',
                    fontSize: 11,
                    fontWeight: 700,
                    cursor: 'pointer'
                  }}
                >
                  {dateKey}
                </button>
              );
            })}
          </div>
          {/* Category filter: hide zero-count categories except the 'all' pseudo-category */}
          <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
            {Object.entries(eventCategoryMeta)
              .filter(([key]) => (eventCategoryCounts[key] || 0) > 0 || key === 'all')
              .map(([key, meta]) => {
                const isActive = key === activeEventCategory;
                return (
                  <button
                    key={key}
                    onClick={() => onSelectEventCategory(key)}
                    style={{
                      border: `1px solid ${meta.color}`,
                      background: isActive ? meta.color : '#ffffff',
                      color: isActive ? '#ffffff' : meta.color,
                      padding: '8px 10px',
                      fontFamily: 'inherit',
                      fontSize: 11,
                      fontWeight: 700,
                      cursor: 'pointer'
                    }}
                  >
                    {meta.label} {eventCategoryCounts[key] || 0}
                  </button>
                );
              })}
          </div>
          {visibleExplainEvents.length === 0 ? (
            <div className="empty-state">当前日期下没有符合筛选条件的事件</div>
          ) : (
            <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(320px, 1fr))', gap: 16 }}>
              {visibleExplainEvents.map((event) => {
                // Accent dot colour is driven only by the event tone.
                const accent = event.tone === 'positive' ? '#00C853' : event.tone === 'negative' ? '#FF1744' : '#000000';
                const categoryMeta = eventCategoryMeta[event.category] || eventCategoryMeta.other;
                return (
                  <div
                    key={event.id}
                    style={{
                      border: '1px solid #000000',
                      background: '#ffffff',
                      padding: 14,
                      minHeight: 180
                    }}
                  >
                    <div style={{ display: 'flex', justifyContent: 'space-between', gap: 12, marginBottom: 8 }}>
                      <div style={{ display: 'flex', alignItems: 'center', gap: 8, flexWrap: 'wrap' }}>
                        <span style={{
                          display: 'inline-flex',
                          padding: '2px 6px',
                          border: `1px solid ${categoryMeta.color}`,
                          color: categoryMeta.color,
                          fontSize: 10,
                          fontWeight: 700
                        }}>
                          {categoryMeta.label}
                        </span>
                        <strong style={{ fontSize: 13 }}>{event.title}</strong>
                      </div>
                      <span style={{ fontSize: 10, color: '#666666', whiteSpace: 'nowrap' }}>
                        {formatDateTime(event.timestamp)}
                      </span>
                    </div>
                    <div style={{ display: 'flex', alignItems: 'center', gap: 8, marginBottom: 10 }}>
                      <span style={{
                        width: 8,
                        height: 8,
                        borderRadius: '50%',
                        background: accent
                      }} />
                      <span style={{ fontSize: 10, color: '#666666', textTransform: 'uppercase', letterSpacing: 0.6 }}>
                        {event.meta}
                      </span>
                    </div>
                    <div style={{ fontSize: 12, lineHeight: 1.7, color: '#000000', whiteSpace: 'pre-wrap', wordBreak: 'break-word' }}>
                      {event.body}
                    </div>
                  </div>
                );
              })}
            </div>
          )}
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,249 @@
import React from 'react';
// Build the inline style object for a two-state toggle button.
// When `active`, the button is filled with the accent colour and shows white
// text; when inactive it is white with accent-coloured text. The border always
// uses the accent colour, and the default accent is the app's near-black.
function toggleButtonStyle(active, accent = '#111111') {
  const fill = active ? accent : '#ffffff';
  const text = active ? '#ffffff' : accent;
  return {
    border: `1px solid ${accent}`,
    background: fill,
    color: text,
    padding: '6px 10px',
    fontFamily: 'inherit',
    fontSize: 11,
    fontWeight: 700,
    cursor: 'pointer'
  };
}
/**
 * Collapsible maintenance panel ("分析数据维护") for re-running news enrichment
 * over a date range for the selected symbol.
 *
 * Purely presentational: all state (dates, toggle flags, running flag, status
 * and history) is owned by the parent and mutated through the `on*` callbacks.
 *
 * Props of note:
 * - `maintenanceStatus`  — last run's { running, error, updatedAt, stats };
 *   `stats.execution_summary` (if present) feeds the summary box.
 * - `maintenanceHistory` — recent run entries; only the first 5 are rendered.
 *   Each entry can be copied back into the form (`onSelectHistory`) or replayed
 *   directly (`onReplayHistory`).
 * - The four toggle buttons (force / only-local-to-LLM / rebuild-story /
 *   rebuild-similar-days) share styling via `toggleButtonStyle`.
 */
export default function ExplainMaintenanceSection({
  selectedSymbol,
  enrichStartDate,
  enrichEndDate,
  onChangeStartDate,
  onChangeEndDate,
  forceEnrich,
  onToggleForce,
  onlyLocalToLlm,
  onToggleOnlyLocalToLlm,
  rebuildStory,
  onToggleRebuildStory,
  rebuildSimilarDays,
  onToggleRebuildSimilarDays,
  isRunning,
  onRunEnrich,
  maintenanceStatus,
  maintenanceHistory,
  onSelectHistory,
  onReplayHistory,
  isOpen,
  onToggle,
}) {
  const stats = maintenanceStatus?.stats || null;
  const summary = stats?.execution_summary || null;
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">分析数据维护</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            当前标的 {selectedSymbol || '-'}
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起刷新工具' : '展开刷新工具'}
          </button>
        </div>
      </div>
      {!isOpen ? (
        <div className="empty-state">刷新工具默认收起需要时再展开重新分析数据或查看历史</div>
      ) : (
        <div
          style={{
            border: '1px solid #000000',
            background: '#ffffff',
            padding: 14,
            display: 'grid',
            gap: 14,
          }}
        >
          {/* Date-range inputs for the enrichment run */}
          <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(180px, 1fr))', gap: 12 }}>
            <label style={{ display: 'grid', gap: 6, fontSize: 11, fontWeight: 700 }}>
              开始日期
              <input type="date" value={enrichStartDate} onChange={(e) => onChangeStartDate(e.target.value)} style={{ border: '1px solid #111111', padding: '8px 10px', fontFamily: 'inherit' }} />
            </label>
            <label style={{ display: 'grid', gap: 6, fontSize: 11, fontWeight: 700 }}>
              结束日期
              <input type="date" value={enrichEndDate} onChange={(e) => onChangeEndDate(e.target.value)} style={{ border: '1px solid #111111', padding: '8px 10px', fontFamily: 'inherit' }} />
            </label>
          </div>
          {/* Run-option toggles; label text reflects the current flag value */}
          <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
            <button onClick={onToggleForce} style={toggleButtonStyle(forceEnrich, '#b91c1c')}>
              {forceEnrich ? '覆盖已有分析' : '仅补缺失'}
            </button>
            <button onClick={onToggleOnlyLocalToLlm} style={toggleButtonStyle(onlyLocalToLlm, '#7c3aed')}>
              {onlyLocalToLlm ? '仅将规则分析升级为 LLM分析' : '不限制分析来源'}
            </button>
            <button onClick={onToggleRebuildStory} style={toggleButtonStyle(rebuildStory, '#2563eb')}>
              {rebuildStory ? '重建主线叙事' : '跳过主线叙事'}
            </button>
            <button onClick={onToggleRebuildSimilarDays} style={toggleButtonStyle(rebuildSimilarDays, '#15803d')}>
              {rebuildSimilarDays ? '重建相似交易日' : '跳过相似交易日'}
            </button>
          </div>
          {/* Run button is disabled while running or when symbol/date range is missing */}
          <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
            <button
              onClick={onRunEnrich}
              disabled={isRunning || !selectedSymbol || !enrichStartDate || !enrichEndDate}
              style={{
                border: '1px solid #111111',
                background: isRunning ? '#d1d5db' : '#111111',
                color: '#ffffff',
                padding: '9px 14px',
                fontFamily: 'inherit',
                fontSize: 11,
                fontWeight: 700,
                cursor: isRunning ? 'wait' : 'pointer'
              }}
            >
              {isRunning ? '执行中...' : '重新分析当前区间'}
            </button>
            {maintenanceStatus?.updatedAt ? (
              <span style={{ fontSize: 11, color: '#666666' }}>
                最近一次执行: {maintenanceStatus.updatedAt}
              </span>
            ) : null}
          </div>
          {maintenanceStatus?.error ? (
            <div style={{ fontSize: 11, color: '#991b1b', lineHeight: 1.7 }}>
              执行失败: {maintenanceStatus.error}
            </div>
          ) : null}
          {stats ? (
            <>
              {/* Counter tiles built from [label, value] pairs of the stats payload */}
              <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(140px, 1fr))', gap: 10 }}>
                {[
                  ['新闻总数', stats.news_count],
                  ['待处理', stats.queued_count],
                  ['已分析', stats.analyzed],
                  ['已跳过', stats.skipped_existing_count],
                  ['去重数', stats.deduped_count],
                  ['LLM分析', stats.llm_count],
                  ['规则分析', stats.local_count],
                  ['升级数', stats.upgraded_local_to_llm_count],
                ].map(([label, value]) => (
                  <div key={label} style={{ border: '1px solid #111111', padding: 10 }}>
                    <div style={{ fontSize: 10, color: '#666666' }}>{label}</div>
                    <div style={{ fontSize: 18, fontWeight: 700 }}>{value ?? '-'}</div>
                  </div>
                ))}
              </div>
              {summary ? (
                <div style={{ border: '1px solid #111111', padding: 12, fontSize: 11, lineHeight: 1.8 }}>
                  {summary.upgraded_dates?.length ? (
                    <div><strong>升级日期:</strong> {summary.upgraded_dates.join(', ')}</div>
                  ) : null}
                  {summary.remaining_local_titles?.length ? (
                    <div><strong>仍为规则分析:</strong> {summary.remaining_local_titles.join(' / ')}</div>
                  ) : null}
                  {typeof summary.skipped_non_local_count === 'number' ? (
                    <div><strong>跳过非规则分析:</strong> {summary.skipped_non_local_count}</div>
                  ) : null}
                  {typeof summary.skipped_missing_analysis_count === 'number' ? (
                    <div><strong>跳过无历史分析:</strong> {summary.skipped_missing_analysis_count}</div>
                  ) : null}
                </div>
              ) : null}
            </>
          ) : null}
          {Array.isArray(maintenanceHistory) && maintenanceHistory.length > 0 ? (
            <div style={{ border: '1px solid #111111', padding: 12, display: 'grid', gap: 8 }}>
              <div style={{ fontSize: 11, fontWeight: 700 }}>最近刷新历史</div>
              {maintenanceHistory.slice(0, 5).map((item, index) => (
                <div
                  key={`${item.timestamp || 'history'}-${index}`}
                  style={{
                    borderTop: index === 0 ? 'none' : '1px solid #e5e7eb',
                    paddingTop: index === 0 ? 0 : 8,
                    fontSize: 11,
                    lineHeight: 1.8,
                  }}
                >
                  <div>
                    <strong>{item.startDate || '-'}</strong> ~ <strong>{item.endDate || '-'}</strong>
                    {' · '}
                    {item.onlyLocalToLlm ? '规则分析→LLM分析' : item.force ? '覆盖重跑' : '补缺失'}
                    {item.storyStatus ? ' · 主线叙事' : ''}
                    {item.similarStatus ? ' · 相似交易日' : ''}
                  </div>
                  <div style={{ color: item.error ? '#991b1b' : '#4b5563' }}>
                    {item.timestamp || '-'}
                    {item.error
                      ? ` · 失败: ${item.error}`
                      : ` · 已分析 ${item.stats?.analyzed ?? 0},已升级 ${item.stats?.upgraded_local_to_llm_count ?? 0}`}
                  </div>
                  <div style={{ marginTop: 4 }}>
                    <button
                      onClick={() => onSelectHistory?.(item)}
                      style={{
                        border: '1px solid #111111',
                        background: '#ffffff',
                        color: '#111111',
                        padding: '4px 8px',
                        fontFamily: 'inherit',
                        fontSize: 10,
                        fontWeight: 700,
                        cursor: 'pointer'
                      }}
                    >
                      回填到表单
                    </button>
                    <button
                      onClick={() => onReplayHistory?.(item)}
                      style={{
                        marginLeft: 8,
                        border: '1px solid #111111',
                        background: '#111111',
                        color: '#ffffff',
                        padding: '4px 8px',
                        fontFamily: 'inherit',
                        fontSize: 10,
                        fontWeight: 700,
                        cursor: 'pointer'
                      }}
                    >
                      直接重跑
                    </button>
                    {!item.error ? (
                      <span style={{ marginLeft: 8, fontSize: 10, color: '#666666' }}>
                        {item.stats?.execution_summary?.upgraded_dates?.length
                          ? `升级日 ${item.stats.execution_summary.upgraded_dates.join(', ')}`
                          : '无升级日期摘要'}
                      </span>
                    ) : null}
                  </div>
                </div>
              ))}
            </div>
          ) : null}
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,77 @@
import React from 'react';
import { formatDateTime } from '../../utils/formatters';
/**
 * Collapsible "讨论提及" (discussion mentions) panel.
 *
 * Renders `recentMentions` — messages extracted from the trading discussion
 * feed — as a responsive card grid. Purely presentational: the parent owns
 * the open/collapsed state (`isOpen`, toggled via `onToggle`) and the mention
 * list. Each mention is expected to carry agent/content/timestamp fields plus
 * optional `conferenceTitle` / `feedType` used for the sub-label.
 */
export default function ExplainMentionsSection({
  recentMentions,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">讨论提及</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            从交易讨论和分析 feed 提取
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起讨论摘录' : `展开讨论摘录 ${recentMentions.length}`}
          </button>
        </div>
      </div>
      {recentMentions.length === 0 ? (
        <div className="empty-state">最近没有在讨论里提到这只股票</div>
      ) : !isOpen ? (
        <div className="empty-state">讨论摘录默认收起需要时再展开查看</div>
      ) : (
        <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(320px, 1fr))', gap: 16 }}>
          {recentMentions.map((message, index) => (
            <div
              key={`${message.feedId || message.id}-${index}`}
              style={{
                border: '1px solid #000000',
                background: '#fafafa',
                padding: 14,
                minHeight: 150
              }}
            >
              <div style={{ display: 'flex', justifyContent: 'space-between', gap: 12, marginBottom: 10 }}>
                <div>
                  <div style={{ fontWeight: 700, color: '#000000' }}>{message.agent || '未知角色'}</div>
                  <div style={{ fontSize: 10, color: '#666666' }}>
                    {message.conferenceTitle || (message.feedType === 'conference' ? '投资讨论' : '即时消息')}
                  </div>
                </div>
                <div style={{ fontSize: 10, color: '#666666', whiteSpace: 'nowrap' }}>
                  {formatDateTime(message.timestamp)}
                </div>
              </div>
              <div style={{
                fontSize: 12,
                lineHeight: 1.7,
                color: '#000000',
                whiteSpace: 'pre-wrap',
                wordBreak: 'break-word'
              }}>
                {String(message.content || '')}
              </div>
            </div>
          ))}
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,308 @@
import React from 'react';
import { formatDateTime } from '../../utils/formatters';
// Map a raw news-category key to its Chinese display label.
// Matching is case-insensitive and whitespace-tolerant; an unknown key falls
// back to the raw value, and a falsy value falls back to ''.
function categoryLabel(value) {
  const key = String(value || '').trim().toLowerCase();
  switch (key) {
    case 'market':
      return '市场交易';
    case 'policy':
      return '政策监管';
    case 'earnings':
      return '业绩财报';
    case 'product_tech':
      return '产品技术';
    case 'competition':
      return '竞争格局';
    case 'management':
      return '管理层动态';
    default:
      return value || '';
  }
}
// Map a relevance level to its Chinese display label.
// 'high' and the legacy value 'relevant' both render as 高相关; matching is
// case-insensitive. Unknown values fall back to the raw value, falsy to ''.
function relevanceLabel(value) {
  const key = String(value || '').trim().toLowerCase();
  switch (key) {
    case 'high':
    case 'relevant':
      return '高相关';
    case 'medium':
      return '中相关';
    case 'low':
      return '低相关';
    default:
      return value || '';
  }
}
// Map an analysis-source tag ('llm' | 'local') to its Chinese display label.
// Matching is case-insensitive; unknown values fall back to the raw value,
// and a falsy value falls back to ''.
function analysisSourceLabel(value) {
  const key = String(value || '').trim().toLowerCase();
  const labels = { llm: 'LLM分析', local: '规则分析' };
  return labels[key] || value || '';
}
// Return the colour palette + label for a sentiment badge.
// 'positive' → green 利多, 'negative' → red 利空, anything else (including
// falsy) → grey 中性. A fresh object is returned on every call.
function sentimentStyle(sentiment) {
  const tone = String(sentiment || '').trim().toLowerCase();
  return tone === 'positive'
    ? { border: '#16a34a', background: '#f0fdf4', color: '#166534', label: '利多' }
    : tone === 'negative'
      ? { border: '#dc2626', background: '#fef2f2', color: '#991b1b', label: '利空' }
      : { border: '#6b7280', background: '#f9fafb', color: '#4b5563', label: '中性' };
}
/**
 * Collapsible news panel ("新闻面板") for the stock-explain view.
 *
 * When open it shows, in order: category filter buttons (built from
 * `newsCategories`, zero-count entries hidden), sentiment filter buttons
 * (all / positive / negative / neutral), and the filtered news cards in
 * `visibleNewsByCategory`. All filtering state and derived lists live in
 * the parent; this component only renders and forwards clicks via
 * `onSelectNewsCategory` / `onSelectNewsSentiment` / `onToggle`.
 *
 * Card fields used: sentiment, relevance, analysisSource, analysisModelLabel,
 * retT0 (fractional T+0 return, rendered as a percentage), category, title,
 * date, source, related, summary, keyDiscussion, reasonGrowth,
 * reasonDecrease, url. All are optional except id/title.
 */
export default function ExplainNewsSection({
  newsSnapshot,
  visibleNewsByCategory,
  visibleNews,
  activeNewsCategory,
  onSelectNewsCategory,
  activeNewsSentiment,
  onSelectNewsSentiment,
  newsCategories,
  tickerNews,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">新闻面板</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {newsSnapshot?.source ? `最近 ${visibleNewsByCategory.length} 条 · ${newsSnapshot.source}` : `最近 ${visibleNewsByCategory.length} 条真实新闻`}
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起新闻面板' : '展开新闻面板'}
          </button>
        </div>
      </div>
      {!isOpen ? (
        <div className="empty-state">新闻面板已收起需要时再展开查看分类情绪和新闻卡片</div>
      ) : (
        <>
          {/* Category filter row: 'all' first, then non-empty categories with +pos/-neg counts */}
          <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap', marginBottom: 14 }}>
            <button
              onClick={() => onSelectNewsCategory('all')}
              style={{
                border: '1px solid #111111',
                background: activeNewsCategory === 'all' ? '#111111' : '#ffffff',
                color: activeNewsCategory === 'all' ? '#ffffff' : '#111111',
                padding: '7px 10px',
                fontFamily: 'inherit',
                fontSize: 11,
                fontWeight: 700,
                cursor: 'pointer'
              }}
            >
              全部 {visibleNews.length}
            </button>
            {Object.entries(newsCategories)
              .filter(([, meta]) => Number(meta?.count || 0) > 0)
              .map(([key, meta]) => {
                const isActive = activeNewsCategory === key;
                const pos = Number(meta?.positive_ids?.length || 0);
                const neg = Number(meta?.negative_ids?.length || 0);
                return (
                  <button
                    key={key}
                    onClick={() => onSelectNewsCategory(key)}
                    style={{
                      border: '1px solid #2563eb',
                      background: isActive ? '#2563eb' : '#ffffff',
                      color: isActive ? '#ffffff' : '#2563eb',
                      padding: '7px 10px',
                      fontFamily: 'inherit',
                      fontSize: 11,
                      fontWeight: 700,
                      cursor: 'pointer'
                    }}
                  >
                    {categoryLabel(meta.label || key)} {meta.count}{pos || neg ? ` · +${pos}/-${neg}` : ''}
                  </button>
                );
              })}
          </div>
          {/* Sentiment filter row */}
          <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap', marginBottom: 14 }}>
            {[
              { key: 'all', label: '全部情绪' },
              { key: 'positive', label: '利多' },
              { key: 'negative', label: '利空' },
              { key: 'neutral', label: '中性' }
            ].map((item) => {
              const isActive = activeNewsSentiment === item.key;
              return (
                <button
                  key={item.key}
                  onClick={() => onSelectNewsSentiment(item.key)}
                  style={{
                    border: '1px solid #111111',
                    background: isActive ? '#111111' : '#ffffff',
                    color: isActive ? '#ffffff' : '#111111',
                    padding: '6px 10px',
                    fontFamily: 'inherit',
                    fontSize: 11,
                    fontWeight: 700,
                    cursor: 'pointer'
                  }}
                >
                  {item.label}
                </button>
              );
            })}
          </div>
          {tickerNews.length === 0 ? (
            <div className="empty-state">当前数据源没有返回相关新闻</div>
          ) : (
            <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(320px, 1fr))', gap: 16 }}>
              {visibleNewsByCategory.map((item) => (
                <div
                  key={item.id}
                  style={{
                    border: '1px solid #000000',
                    background: '#ffffff',
                    padding: 14,
                    minHeight: 180
                  }}
                >
                  {/* Badge row (sentiment / relevance / analysis source / model / T0 return) */}
                  {(() => {
                    const sentimentMeta = sentimentStyle(item.sentiment);
                    return (
                      <div style={{ display: 'flex', gap: 8, flexWrap: 'wrap', marginBottom: 10 }}>
                        <span style={{
                          display: 'inline-flex',
                          padding: '2px 6px',
                          border: `1px solid ${sentimentMeta.border}`,
                          background: sentimentMeta.background,
                          color: sentimentMeta.color,
                          fontSize: 10,
                          fontWeight: 700
                        }}>
                          {sentimentMeta.label}
                        </span>
                        {item.relevance ? (
                          <span style={{
                            display: 'inline-flex',
                            padding: '2px 6px',
                            border: '1px solid #111111',
                            color: '#111111',
                            fontSize: 10,
                            fontWeight: 700
                          }}>
                            {relevanceLabel(item.relevance)}
                          </span>
                        ) : null}
                        {item.analysisSource ? (
                          <span style={{
                            display: 'inline-flex',
                            padding: '2px 6px',
                            border: '1px solid #6b7280',
                            color: '#4b5563',
                            fontSize: 10,
                            fontWeight: 700
                          }}>
                            {analysisSourceLabel(item.analysisSource)}
                          </span>
                        ) : null}
                        {item.analysisModelLabel ? (
                          <span style={{
                            display: 'inline-flex',
                            padding: '2px 6px',
                            border: '1px solid #9ca3af',
                            color: '#374151',
                            fontSize: 10,
                            fontWeight: 700
                          }}>
                            {item.analysisModelLabel}
                          </span>
                        ) : null}
                        {typeof item.retT0 === 'number' ? (
                          <span style={{ fontSize: 10, color: item.retT0 >= 0 ? '#15803d' : '#b91c1c', fontWeight: 700 }}>
                            T0 {item.retT0 >= 0 ? '+' : ''}{(item.retT0 * 100).toFixed(2)}%
                          </span>
                        ) : null}
                      </div>
                    );
                  })()}
                  <div style={{ display: 'flex', justifyContent: 'space-between', gap: 12, marginBottom: 8 }}>
                    <div style={{ display: 'flex', alignItems: 'center', gap: 8, flexWrap: 'wrap' }}>
                      {item.category ? (
                        <span style={{
                          display: 'inline-flex',
                          padding: '2px 6px',
                          border: '1px solid #111111',
                          color: '#111111',
                          fontSize: 10,
                          fontWeight: 700
                        }}>
                          {categoryLabel(item.category)}
                        </span>
                      ) : null}
                      <strong style={{ fontSize: 13 }}>{item.title}</strong>
                    </div>
                    <span style={{ fontSize: 10, color: '#666666', whiteSpace: 'nowrap' }}>
                      {formatDateTime(item.date)}
                    </span>
                  </div>
                  <div style={{ display: 'flex', alignItems: 'center', gap: 8, marginBottom: 10, flexWrap: 'wrap' }}>
                    <span style={{ fontSize: 10, color: '#666666', textTransform: 'uppercase', letterSpacing: 0.6 }}>
                      {item.source}
                    </span>
                    {item.related ? (
                      <span style={{ fontSize: 10, color: '#666666' }}>
                        关联: {item.related}
                      </span>
                    ) : null}
                  </div>
                  <div style={{ fontSize: 12, lineHeight: 1.7, color: '#000000', whiteSpace: 'pre-wrap', wordBreak: 'break-word' }}>
                    {item.summary || '该新闻没有可用摘要。'}
                  </div>
                  {item.keyDiscussion ? (
                    <div style={{ marginTop: 10, fontSize: 11, lineHeight: 1.7, color: '#374151' }}>
                      <strong>核心讨论:</strong> {item.keyDiscussion}
                    </div>
                  ) : null}
                  {item.reasonGrowth ? (
                    <div style={{ marginTop: 8, fontSize: 11, lineHeight: 1.7, color: '#166534' }}>
                      <strong>利多逻辑:</strong> {item.reasonGrowth}
                    </div>
                  ) : null}
                  {item.reasonDecrease ? (
                    <div style={{ marginTop: 8, fontSize: 11, lineHeight: 1.7, color: '#991b1b' }}>
                      <strong>利空逻辑:</strong> {item.reasonDecrease}
                    </div>
                  ) : null}
                  {item.url ? (
                    <div style={{ marginTop: 12 }}>
                      <a
                        href={item.url}
                        target="_blank"
                        rel="noreferrer"
                        style={{ fontSize: 11, fontWeight: 700, color: '#111111', textDecoration: 'underline' }}
                      >
                        查看原文
                      </a>
                    </div>
                  ) : null}
                </div>
              ))}
            </div>
          )}
        </>
      )}
    </div>
  );
}
View File

@@ -0,0 +1,155 @@
import React from 'react';
import { formatTickerPrice } from '../../utils/formatters';
/**
 * Price-and-events section.
 *
 * Renders a candlestick chart when daily OHLC history is available
 * (ohlcSeries.length > 1); otherwise falls back to a simplified line/area
 * chart built from intraday price points. Event markers (news, discussion,
 * signals, trades) are drawn on top and clicking one selects its date.
 *
 * Props:
 * - ohlcSeries: daily OHLC bars (preferred data source).
 * - priceSeries: raw intraday price points (fallback data source).
 * - selectedHistorySource: label of the OHLC history source shown in header.
 * - chartModel: precomputed layout built by the parent — width/height/padding,
 *   candles, line path, markers, min/max price, bucketCount.
 * - selectedTicker: current ticker record; `.price` is shown as the live price.
 * - onSelectEventDate: callback invoked with a marker's dateKey on click.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainPriceSection({
  ohlcSeries,
  priceSeries,
  selectedHistorySource,
  chartModel,
  selectedTicker,
  onSelectEventDate,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">价格与事件</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {ohlcSeries.length > 1
              ? `最近 ${ohlcSeries.length} 根日线K线${selectedHistorySource ? ` · ${selectedHistorySource}` : ''}`
              : `最近 ${priceSeries.length} 个价格点聚合为 ${chartModel.bucketCount || 0} 根简化K线`}
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起价格区' : '展开价格区'}
          </button>
        </div>
      </div>
      {ohlcSeries.length === 0 && priceSeries.length === 0 ? (
        <div className="empty-state">当前还没有可绘制的价格历史</div>
      ) : !isOpen ? (
        <div className="empty-state">价格区已收起需要时再展开查看图表和事件点</div>
      ) : (
        <div style={{ border: '1px solid #000000', background: '#ffffff', padding: 16 }}>
          <svg
            viewBox={`0 0 ${chartModel.width} ${chartModel.height}`}
            style={{ width: '100%', height: '220px', display: 'block', overflow: 'visible' }}
          >
            <defs>
              <linearGradient id="stockExplainFill" x1="0" y1="0" x2="0" y2="1">
                <stop offset="0%" stopColor="rgba(0,0,0,0.18)" />
                <stop offset="100%" stopColor="rgba(0,0,0,0.02)" />
              </linearGradient>
            </defs>
            <rect x="0" y="0" width={chartModel.width} height={chartModel.height} fill="#fafafa" />
            <line
              x1={chartModel.padding}
              y1={chartModel.height - chartModel.padding}
              x2={chartModel.width - chartModel.padding}
              y2={chartModel.height - chartModel.padding}
              stroke="#000000"
              strokeWidth="1"
            />
            {chartModel.candles.length > 1 ? chartModel.candles.map((candle) => {
              // Green for rising candles (close >= open), red for falling ones.
              const rising = candle.close >= candle.open;
              const stroke = rising ? '#00C853' : '#FF1744';
              const fill = rising ? 'rgba(0, 200, 83, 0.16)' : 'rgba(255, 23, 68, 0.16)';
              return (
                <g key={candle.id}>
                  <line
                    x1={candle.centerX}
                    y1={candle.highY}
                    x2={candle.centerX}
                    y2={candle.lowY}
                    stroke={stroke}
                    strokeWidth="1.4"
                  />
                  <rect
                    x={candle.x}
                    y={candle.bodyY}
                    width={candle.width}
                    height={candle.bodyHeight}
                    fill={fill}
                    stroke={stroke}
                    strokeWidth="1.4"
                  />
                </g>
              );
            }) : chartModel.path && (
              <>
                <path d={`${chartModel.path} L${chartModel.width - chartModel.padding},${chartModel.height - chartModel.padding} L${chartModel.padding},${chartModel.height - chartModel.padding} Z`} fill="url(#stockExplainFill)" />
                <path d={chartModel.path} fill="none" stroke="#000000" strokeWidth="2.5" />
              </>
            )}
            {chartModel.markers.map((marker) => {
              // Marker color by tone: positive=green, negative=red, news=blue,
              // anything else black (matches the legend below the chart).
              const fill = marker.tone === 'positive'
                ? '#00C853'
                : marker.tone === 'negative'
                  ? '#FF1744'
                  : marker.tone === 'news'
                    ? '#2563eb'
                    : '#000000';
              return (
                <g
                  key={marker.id}
                  onClick={() => onSelectEventDate(marker.dateKey)}
                  style={{ cursor: 'pointer' }}
                >
                  <line x1={marker.x} y1={marker.y} x2={marker.x} y2={chartModel.height - chartModel.padding} stroke={fill} strokeDasharray="3 3" strokeWidth="1" />
                  <circle
                    cx={marker.x}
                    cy={marker.y}
                    r={marker.markerType === 'news'
                      ? (marker.isSelected ? '5.5' : '4')
                      : (marker.isSelected ? '6' : '4.5')}
                    fill={fill}
                    stroke={marker.isSelected ? '#111111' : '#ffffff'}
                    strokeWidth={marker.isSelected ? '2.5' : '2'}
                  />
                  <title>{`${marker.title} · ${marker.dateKey || ''}${marker.count ? ` · ${marker.count} 条新闻` : ''}`}</title>
                </g>
              );
            })}
            <text x={chartModel.padding} y="14" fontSize="11" fill="#666666">
              {chartModel.maxPrice != null ? `高点 $${formatTickerPrice(chartModel.maxPrice)}` : ''}
            </text>
            <text x={chartModel.padding} y={chartModel.height - 6} fontSize="11" fill="#666666">
              {chartModel.minPrice != null ? `低点 $${formatTickerPrice(chartModel.minPrice)}` : ''}
            </text>
            <text x={chartModel.width - chartModel.padding} y="14" fontSize="11" fill="#666666" textAnchor="end">
              {selectedTicker?.price != null ? `现价 $${formatTickerPrice(selectedTicker.price)}` : ''}
            </text>
          </svg>
          <div style={{ display: 'flex', gap: 12, flexWrap: 'wrap', marginTop: 12 }}>
            <div style={{ fontSize: 11, color: '#666666' }}>
              图表说明{ohlcSeries.length > 1 ? '历史日线K线' : '基于盘中价格点聚合的简化K线'}
            </div>
            <div style={{ fontSize: 11, color: '#2563eb' }}>蓝点新闻日期</div>
            <div style={{ fontSize: 11, color: '#666666' }}>黑点讨论提及</div>
            <div style={{ fontSize: 11, color: '#00C853' }}>绿点偏多信号或做多成交</div>
            <div style={{ fontSize: 11, color: '#FF1744' }}>红点偏空信号或做空成交</div>
          </div>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,220 @@
import React from 'react';
import { formatTickerPrice } from '../../utils/formatters';
function renderSentimentLabel(value) {
  // Translate an English sentiment keyword into its Chinese display label;
  // unknown values are shown as-is (empty string for falsy input).
  const key = String(value || '').trim().toLowerCase();
  const labels = { positive: '利多', negative: '利空', neutral: '中性' };
  return labels[key] || value || '';
}
function renderCategoryLabel(value) {
  // Translate a news-category slug into its Chinese display label;
  // unknown slugs are shown as-is (empty string for falsy input).
  const key = String(value || '').trim().toLowerCase();
  switch (key) {
    case 'market': return '市场交易';
    case 'policy': return '政策监管';
    case 'earnings': return '业绩财报';
    case 'product_tech': return '产品技术';
    case 'competition': return '竞争格局';
    case 'management': return '管理层动态';
    default: return value || '';
  }
}
function renderAnalysisSourceLabel(value) {
  // Translate an analysis-source slug ('llm' | 'local') into Chinese;
  // unknown values are shown as-is (empty string for falsy input).
  const key = String(value || '').trim().toLowerCase();
  const labels = { llm: 'LLM分析', local: '规则分析' };
  return labels[key] || value || '';
}
/**
 * One label/value row used inside the range-snapshot cards.
 * `valueColor` tints the bold value on the right (defaults to near-black).
 */
function MetricRow({ label, value, valueColor = '#111111' }) {
  return (
    <div style={{ display: 'flex', justifyContent: 'space-between', fontSize: 12, gap: 12 }}>
      <span style={{ color: '#4b5563' }}>{label}</span>
      <strong style={{ color: valueColor, textAlign: 'right' }}>{value}</strong>
    </div>
  );
}
/**
 * Wrapping list of small tag chips colored by tone
 * ('positive' | 'negative' | 'neutral'; unknown tones fall back to neutral).
 * Renders `emptyText` when `items` is missing or empty.
 */
function TagList({ items, tone = 'neutral', emptyText }) {
  const palette = {
    positive: { border: '#86efac', background: '#f0fdf4', color: '#166534' },
    negative: { border: '#fca5a5', background: '#fef2f2', color: '#991b1b' },
    neutral: { border: '#d1d5db', background: '#f9fafb', color: '#374151' },
  };
  const colors = palette[tone] || palette.neutral;
  if (!Array.isArray(items) || items.length === 0) {
    return <div style={{ fontSize: 12, lineHeight: 1.7, color: '#6b7280' }}>{emptyText}</div>;
  }
  return (
    <div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
      {items.map((item, index) => (
        <div
          key={`${tone}-${index}-${item}`}
          style={{
            border: `1px solid ${colors.border}`,
            background: colors.background,
            color: colors.color,
            padding: '6px 10px',
            fontSize: 11,
            lineHeight: 1.6,
          }}
        >
          {item}
        </div>
      ))}
    </div>
  );
}
/**
 * Range-movement analysis section.
 *
 * Shows the selected date window's summary (narrative text, trend breakdown,
 * key events) next to a snapshot column (price change, news count, high/low,
 * theme distribution, bullish/bearish factor chips).
 *
 * Props:
 * - selectedRangeWindow: { startDate, endDate } or null before a date is picked.
 * - selectedRangeExplain: analysis payload; may carry `.error` while failed,
 *   or be null while loading. `.analysis.analysis_source` distinguishes
 *   LLM-generated vs rule-based analysis for the header label.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainRangeSection({
  selectedRangeWindow,
  selectedRangeExplain,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">区间涨跌分析</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {selectedRangeWindow
              ? `${selectedRangeWindow.startDate} ~ ${selectedRangeWindow.endDate}`
              : '先在图上选择一个事件日期'}
          </div>
          {selectedRangeExplain?.analysis?.analysis_source ? (
            <div style={{ fontSize: 11, color: '#666666' }}>
              {selectedRangeExplain.analysis.analysis_source === 'llm'
                ? `分析来源 · ${renderAnalysisSourceLabel(selectedRangeExplain.analysis.analysis_source)} · ${selectedRangeExplain.analysis.analysis_model_label || 'LLM'}`
                : `分析来源 · ${renderAnalysisSourceLabel(selectedRangeExplain.analysis.analysis_source)}`}
            </div>
          ) : null}
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起区间涨跌分析' : '展开区间涨跌分析'}
          </button>
        </div>
      </div>
      {!selectedRangeWindow ? (
        <div className="empty-state">选择图上的日期后会自动生成最近 7 天的区间涨跌分析</div>
      ) : !isOpen ? (
        <div className="empty-state">区间涨跌分析已收起需要时再展开查看摘要和快照</div>
      ) : !selectedRangeExplain ? (
        <div className="empty-state">正在生成区间涨跌分析...</div>
      ) : selectedRangeExplain.error ? (
        <div className="empty-state">{selectedRangeExplain.error}</div>
      ) : (
        <div style={{ display: 'grid', gridTemplateColumns: '1.2fr 1fr', gap: 16 }}>
          <div style={{ border: '1px solid #000000', background: '#ffffff', padding: 16 }}>
            <div style={{ fontSize: 11, color: '#666666', marginBottom: 10, textTransform: 'uppercase', letterSpacing: 1 }}>
              区间摘要
            </div>
            <div style={{ fontSize: 13, lineHeight: 1.8 }}>
              {selectedRangeExplain.analysis?.summary || '暂无区间摘要'}
            </div>
            {selectedRangeExplain.analysis?.trend_analysis ? (
              <div style={{ marginTop: 10, fontSize: 12, lineHeight: 1.7, color: '#4b5563' }}>
                <strong>趋势拆解:</strong> {selectedRangeExplain.analysis.trend_analysis}
              </div>
            ) : null}
            <div style={{ marginTop: 14, display: 'grid', gap: 8 }}>
              {(selectedRangeExplain.analysis?.key_events || []).slice(0, 6).map((event, index) => (
                <div key={`${event.id || event.title}-${index}`} style={{ borderTop: index === 0 ? 'none' : '1px solid #e5e7eb', paddingTop: index === 0 ? 0 : 8 }}>
                  <div style={{ fontSize: 11, color: '#666666', marginBottom: 4 }}>
                    {event.date || '-'} {event.category ? `· ${renderCategoryLabel(event.category)}` : ''} {event.sentiment ? `· ${renderSentimentLabel(event.sentiment)}` : ''}
                  </div>
                  <div style={{ fontSize: 12, fontWeight: 700, marginBottom: 4 }}>{event.title}</div>
                  <div style={{ fontSize: 12, lineHeight: 1.6 }}>{event.summary || '暂无摘要'}</div>
                </div>
              ))}
            </div>
          </div>
          <div style={{ border: '1px solid #000000', background: '#fafafa', padding: 16 }}>
            <div style={{ fontSize: 11, color: '#666666', marginBottom: 12, textTransform: 'uppercase', letterSpacing: 1 }}>
              区间快照
            </div>
            <div style={{ display: 'grid', gap: 10 }}>
              <div style={{ border: '1px solid #e5e7eb', background: '#ffffff', padding: 12, display: 'grid', gap: 10 }}>
                <div style={{ fontSize: 11, fontWeight: 700, color: '#374151' }}>事实概览</div>
                <MetricRow
                  label="区间涨跌"
                  value={`${Number(selectedRangeExplain.price_change_pct) >= 0 ? '+' : ''}${Number(selectedRangeExplain.price_change_pct || 0).toFixed(2)}%`}
                  valueColor={Number(selectedRangeExplain.price_change_pct) >= 0 ? '#00C853' : '#FF1744'}
                />
                <MetricRow label="关联新闻" value={selectedRangeExplain.news_count || 0} />
                <MetricRow label="区间高点" value={`$${formatTickerPrice(selectedRangeExplain.high_price)}`} />
                <MetricRow label="区间低点" value={`$${formatTickerPrice(selectedRangeExplain.low_price)}`} />
                <MetricRow label="交易日数" value={selectedRangeExplain.trading_days || 0} />
              </div>
              <div style={{ border: '1px solid #e5e7eb', background: '#ffffff', padding: 12, display: 'grid', gap: 10 }}>
                <div style={{ fontSize: 11, fontWeight: 700, color: '#374151' }}>主题分布</div>
                {(selectedRangeExplain.dominant_categories || []).length > 0 ? (
                  <div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
                    {selectedRangeExplain.dominant_categories.map((item) => (
                      <div
                        key={`${item.category}-${item.count}`}
                        style={{
                          border: '1px solid #d1d5db',
                          background: '#f9fafb',
                          color: '#374151',
                          padding: '6px 10px',
                          fontSize: 11,
                          lineHeight: 1.5,
                        }}
                      >
                        {renderCategoryLabel(item.category)} · {item.count}
                      </div>
                    ))}
                  </div>
                ) : (
                  <div style={{ fontSize: 12, lineHeight: 1.7, color: '#6b7280' }}>
                    当前没有识别出明显的主题聚类
                  </div>
                )}
              </div>
              <div style={{ border: '1px solid #e5e7eb', background: '#ffffff', padding: 12, display: 'grid', gap: 10 }}>
                <div style={{ fontSize: 11, fontWeight: 700, color: '#374151' }}>驱动因素</div>
                <div style={{ display: 'grid', gap: 8 }}>
                  <div style={{ fontSize: 11, fontWeight: 700, color: '#166534' }}>利多因素</div>
                  <TagList
                    items={selectedRangeExplain.analysis?.bullish_factors || []}
                    tone="positive"
                    emptyText="当前区间内未提炼出明确的利多因素。"
                  />
                </div>
                <div style={{ display: 'grid', gap: 8 }}>
                  <div style={{ fontSize: 11, fontWeight: 700, color: '#991b1b' }}>利空因素</div>
                  <TagList
                    items={selectedRangeExplain.analysis?.bearish_factors || []}
                    tone="negative"
                    emptyText="当前区间内未提炼出明确的利空因素。"
                  />
                </div>
              </div>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,123 @@
import React from 'react';
/**
 * Analyst-signals section.
 *
 * Shows a stat grid (bullish/bearish/neutral counts plus the latest signal's
 * verdict) and a table of up to 8 recent signals with direction, realized
 * return, and hit/miss status.
 *
 * Props:
 * - tickerSignals: normalized signal rows for the selected ticker.
 * - signalSummary: { bullish, bearish, neutral } counts.
 * - latestSignal: most recent signal row, or null.
 * - eventDateKey: helper converting a timestamp to a YYYY-MM-DD key
 *   (used when a row has no explicit date).
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainSignalsSection({
  tickerSignals,
  signalSummary,
  latestSignal,
  eventDateKey,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">分析师观点</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            最近 {tickerSignals.length} 条相关信号
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起分析师观点' : '展开分析师观点'}
          </button>
        </div>
      </div>
      {!isOpen ? (
        <div className="empty-state">分析师观点已收起需要时再展开查看信号统计和明细</div>
      ) : (
        <>
          <div className="stats-grid" style={{ marginBottom: 16 }}>
            <div className="stat-card">
              <div className="stat-card-label">看涨</div>
              <div className="stat-card-value positive">{signalSummary.bullish}</div>
            </div>
            <div className="stat-card">
              <div className="stat-card-label">看跌</div>
              <div className="stat-card-value negative">{signalSummary.bearish}</div>
            </div>
            <div className="stat-card">
              <div className="stat-card-label">中性</div>
              <div className="stat-card-value">{signalSummary.neutral}</div>
            </div>
            <div className="stat-card">
              <div className="stat-card-label">最新结论</div>
              <div className="stat-card-value" style={{ fontSize: 22 }}>
                {latestSignal
                  ? latestSignal.normalizedDirection === 'bullish'
                    ? '偏多'
                    : latestSignal.normalizedDirection === 'bearish'
                      ? '偏空'
                      : '观望'
                  : '暂无'}
              </div>
              <div style={{ marginTop: 8, fontSize: 11, color: '#666666' }}>
                {latestSignal ? `${latestSignal.agentName} · ${latestSignal.date || eventDateKey(latestSignal.timestamp)}` : '还没有历史信号'}
              </div>
            </div>
          </div>
          {tickerSignals.length === 0 ? (
            <div className="empty-state">该股票还没有分析师信号记录</div>
          ) : (
            <div className="table-wrapper">
              <table className="data-table">
                <thead>
                  <tr>
                    <th>日期</th>
                    <th>分析师</th>
                    <th>方向</th>
                    <th>实际收益</th>
                    <th>结果</th>
                  </tr>
                </thead>
                <tbody>
                  {tickerSignals.slice(0, 8).map((signal, index) => {
                    // real_return is a fraction (0.0123 → +1.23%); a non-number
                    // means the outcome has not been evaluated yet.
                    const realReturn = typeof signal.real_return === 'number'
                      ? `${signal.real_return >= 0 ? '+' : ''}${(signal.real_return * 100).toFixed(2)}%`
                      : '未判定';
                    const status = signal.is_correct === true ? '命中' : signal.is_correct === false ? '未命中' : '待判定';
                    const directionText = signal.normalizedDirection === 'bullish'
                      ? '看涨'
                      : signal.normalizedDirection === 'bearish'
                        ? '看跌'
                        : '中性';
                    const directionColor = signal.normalizedDirection === 'bullish'
                      ? '#00C853'
                      : signal.normalizedDirection === 'bearish'
                        ? '#FF1744'
                        : '#666666';
                    return (
                      <tr key={signal.id || `${signal.agentId}-${signal.date}-${index}`}>
                        <td>{signal.date || eventDateKey(signal.timestamp) || '-'}</td>
                        <td>
                          <div style={{ fontWeight: 700 }}>{signal.agentName}</div>
                          <div style={{ fontSize: 10, color: '#666666' }}>{signal.role}</div>
                        </td>
                        <td style={{ color: directionColor, fontWeight: 700 }}>{directionText}</td>
                        <td>{realReturn}</td>
                        <td>{status}</td>
                      </tr>
                    );
                  })}
                </tbody>
              </table>
            </div>
          )}
        </>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,111 @@
import React from 'react';
/**
 * Historical similar-trading-days section.
 *
 * Shows the target day's feature snapshot (news count, sentiment score,
 * previous-day return, high-relevance news count) plus one card per similar
 * day; clicking a card selects that date via onSelectSimilarDate.
 *
 * Props:
 * - selectedSimilarDays: result payload with `.target_features` and `.items`;
 *   may carry `.error`, or be null while loading.
 * - selectedEventDate: the date being compared, or null before selection.
 * - onSelectSimilarDate: optional callback invoked with a similar day's date.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainSimilarDaysSection({
  selectedSimilarDays,
  selectedEventDate,
  onSelectSimilarDate,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">历史相似交易日</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {selectedEventDate || '先选择一个事件日期'}
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起相似交易日' : '展开相似交易日'}
          </button>
        </div>
      </div>
      {!selectedEventDate ? (
        <div className="empty-state">选择图上的日期后会检索这只股票历史上的相似交易日</div>
      ) : !isOpen ? (
        <div className="empty-state">相似交易日默认收起需要时再展开查看</div>
      ) : !selectedSimilarDays ? (
        <div className="empty-state">正在检索相似交易日...</div>
      ) : selectedSimilarDays.error ? (
        <div className="empty-state">{selectedSimilarDays.error}</div>
      ) : !Array.isArray(selectedSimilarDays.items) || selectedSimilarDays.items.length === 0 ? (
        <div className="empty-state">当前没有足够历史样本来计算相似交易日</div>
      ) : (
        <div style={{ display: 'grid', gap: 16 }}>
          <div style={{ border: '1px solid #000000', background: '#fafafa', padding: 16 }}>
            <div style={{ fontSize: 11, color: '#666666', marginBottom: 12, textTransform: 'uppercase', letterSpacing: 1 }}>
              目标日快照
            </div>
            <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(140px, 1fr))', gap: 12 }}>
              <div style={{ fontSize: 12 }}>
                <div style={{ color: '#666666', marginBottom: 4 }}>新闻数量</div>
                <strong>{selectedSimilarDays.target_features?.n_articles ?? 0}</strong>
              </div>
              <div style={{ fontSize: 12 }}>
                <div style={{ color: '#666666', marginBottom: 4 }}>情绪分数</div>
                <strong>{Number(selectedSimilarDays.target_features?.sentiment_score ?? 0).toFixed(2)}</strong>
              </div>
              <div style={{ fontSize: 12 }}>
                <div style={{ color: '#666666', marginBottom: 4 }}>前一日涨跌</div>
                <strong>{Number(selectedSimilarDays.target_features?.ret_1d ?? 0).toFixed(2)}%</strong>
              </div>
              <div style={{ fontSize: 12 }}>
                <div style={{ color: '#666666', marginBottom: 4 }}>高相关新闻</div>
                <strong>{selectedSimilarDays.target_features?.high_relevance_count ?? 0}</strong>
              </div>
            </div>
          </div>
          <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(260px, 1fr))', gap: 16 }}>
            {selectedSimilarDays.items.map((item) => (
              <button
                key={item.date}
                onClick={() => onSelectSimilarDate?.(item.date)}
                style={{
                  border: '1px solid #000000',
                  background: '#ffffff',
                  padding: 14,
                  textAlign: 'left',
                  cursor: 'pointer',
                  fontFamily: 'inherit'
                }}
              >
                <div style={{ display: 'flex', justifyContent: 'space-between', gap: 10, marginBottom: 8 }}>
                  <strong style={{ fontSize: 13 }}>{item.date}</strong>
                  <span style={{ fontSize: 11, color: '#666666' }}>
                    相似度 {(Number(item.score || 0) * 100).toFixed(0)}%
                  </span>
                </div>
                <div style={{ display: 'grid', gap: 6, fontSize: 12, marginBottom: 10 }}>
                  <div>新闻数 {item.n_articles ?? 0}</div>
                  <div>情绪分数 {Number(item.sentiment_score ?? 0).toFixed(2)}</div>
                  <div>前一日涨跌 {Number(item.ret_1d ?? 0).toFixed(2)}%</div>
                  <div>次日表现 {item.ret_t1_after != null ? `${item.ret_t1_after >= 0 ? '+' : ''}${Number(item.ret_t1_after).toFixed(2)}%` : '-'}</div>
                  <div>三日表现 {item.ret_t3_after != null ? `${item.ret_t3_after >= 0 ? '+' : ''}${Number(item.ret_t3_after).toFixed(2)}%` : '-'}</div>
                </div>
                {(item.top_reasons || []).length > 0 ? (
                  <div style={{ fontSize: 11, lineHeight: 1.7, color: '#4b5563' }}>
                    <strong>主要线索:</strong> {item.top_reasons.join(' / ')}
                  </div>
                ) : null}
              </button>
            ))}
          </div>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,58 @@
import React from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
/**
 * Main-narrative section: renders the generated story for the selected
 * symbol as GitHub-flavored markdown (via ReactMarkdown + remark-gfm).
 *
 * Props:
 * - selectedStory: { story, source, asOfDate } payload, or null while loading.
 * - selectedSymbol: currently selected ticker symbol; empty means "pick one".
 * - currentDate: fallback date label when the story has no asOfDate.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainStorySection({
  selectedStory,
  selectedSymbol,
  currentDate,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">主线叙事</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {selectedStory?.asOfDate || currentDate || '按当前解释窗口生成'}
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起主线叙事' : '展开主线叙事'}
          </button>
        </div>
      </div>
      {!selectedSymbol ? (
        <div className="empty-state">先选择一只股票</div>
      ) : !isOpen ? (
        <div className="empty-state">主线叙事默认收起需要时再展开查看完整叙事</div>
      ) : !selectedStory?.story ? (
        <div className="empty-state">正在生成主线叙事...</div>
      ) : (
        <div style={{ border: '1px solid #000000', background: '#ffffff', padding: 18 }}>
          <div style={{ fontSize: 11, color: '#666666', marginBottom: 12, textTransform: 'uppercase', letterSpacing: 1 }}>
            {selectedStory?.source ? `来源 · ${selectedStory.source}` : '自动生成'}
          </div>
          <div style={{ fontSize: 13, lineHeight: 1.8 }}>
            <ReactMarkdown remarkPlugins={[remarkGfm]}>
              {selectedStory.story}
            </ReactMarkdown>
          </div>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,87 @@
import React from 'react';
/**
 * Analysis-summary section: a list of generated summary lines on the left
 * and a "signal density" panel on the right counting signals, mentions,
 * trades, and news items for the selected symbol.
 *
 * Props:
 * - explainSummary: array of summary strings rendered one per row.
 * - tickerSignals / recentMentions / tickerTrades / tickerNews: arrays whose
 *   lengths feed the density panel.
 * - selectedSymbol: used only to build stable React keys for summary rows.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainSummarySection({
  explainSummary,
  tickerSignals,
  recentMentions,
  tickerTrades,
  tickerNews,
  selectedSymbol,
  isOpen,
  onToggle,
}) {
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">分析摘要</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            基于当前持仓信号和讨论自动汇总
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起分析摘要' : '展开分析摘要'}
          </button>
        </div>
      </div>
      {!isOpen ? (
        <div className="empty-state">分析摘要已收起需要时再展开查看概览和密度信息</div>
      ) : (
        <div style={{ display: 'grid', gridTemplateColumns: '1.2fr 1fr', gap: 16 }}>
          <div style={{ border: '1px solid #000000', background: '#fafafa', padding: 16 }}>
            <div style={{ fontSize: 11, color: '#666666', marginBottom: 10, textTransform: 'uppercase', letterSpacing: 1 }}>
              当前解释
            </div>
            <div style={{ display: 'flex', flexDirection: 'column', gap: 10 }}>
              {explainSummary.map((line, index) => (
                <div key={`${selectedSymbol}-summary-${index}`} style={{ fontSize: 13, lineHeight: 1.7, color: '#000000' }}>
                  {line}
                </div>
              ))}
            </div>
          </div>
          <div style={{ border: '1px solid #000000', background: '#ffffff', padding: 16 }}>
            <div style={{ fontSize: 11, color: '#666666', marginBottom: 12, textTransform: 'uppercase', letterSpacing: 1 }}>
              信号密度
            </div>
            <div style={{ display: 'grid', gap: 10 }}>
              <div style={{ display: 'flex', justifyContent: 'space-between', fontSize: 12 }}>
                <span>分析师信号</span>
                <strong>{tickerSignals.length}</strong>
              </div>
              <div style={{ display: 'flex', justifyContent: 'space-between', fontSize: 12 }}>
                <span>讨论提及</span>
                <strong>{recentMentions.length}</strong>
              </div>
              <div style={{ display: 'flex', justifyContent: 'space-between', fontSize: 12 }}>
                <span>成交记录</span>
                <strong>{tickerTrades.length}</strong>
              </div>
              <div style={{ display: 'flex', justifyContent: 'space-between', fontSize: 12 }}>
                <span>新闻条目</span>
                <strong>{tickerNews.length}</strong>
              </div>
              <div style={{ height: 1, background: '#e0e0e0', margin: '4px 0' }} />
              <div style={{ fontSize: 12, lineHeight: 1.7, color: '#666666' }}>
                当前分析优先读取已落库的历史记录缺失时再回退到本次运行中的实时事件
              </div>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,74 @@
import React from 'react';
import { formatDateTime } from '../../utils/formatters';
/**
 * Trade-history section: a table of up to 10 trades for the selected symbol
 * (time, side, quantity, price), with LONG/SHORT color-coded.
 *
 * Props:
 * - tickerTrades: normalized trade rows for the selected ticker.
 * - selectedSymbol: ticker shown in the header count line.
 * - isOpen / onToggle: collapsed-state flag and its toggle handler.
 */
export default function ExplainTradesSection({
  tickerTrades,
  selectedSymbol,
  isOpen,
  onToggle,
}) {
  // Map a trade side ('LONG' | 'SHORT') to its Chinese label; other values
  // pass through unchanged ('-' for falsy).
  const sideLabel = (value) => {
    if (value === 'LONG') return '做多';
    if (value === 'SHORT') return '做空';
    return value || '-';
  };
  return (
    <div className="section">
      <div className="section-header">
        <h2 className="section-title">成交记录</h2>
        <div style={{ display: 'flex', alignItems: 'center', gap: 12, flexWrap: 'wrap' }}>
          <div style={{ fontSize: 11, color: '#666666' }}>
            {tickerTrades.length} 笔与 {selectedSymbol} 相关的交易
          </div>
          <button
            onClick={onToggle}
            style={{
              border: '1px solid #111111',
              background: isOpen ? '#111111' : '#ffffff',
              color: isOpen ? '#ffffff' : '#111111',
              padding: '7px 10px',
              fontFamily: 'inherit',
              fontSize: 11,
              fontWeight: 700,
              cursor: 'pointer'
            }}
          >
            {isOpen ? '收起成交记录' : `展开成交记录 ${tickerTrades.length}`}
          </button>
        </div>
      </div>
      {tickerTrades.length === 0 ? (
        <div className="empty-state">该股票暂无成交记录</div>
      ) : !isOpen ? (
        <div className="empty-state">成交记录默认收起需要时再展开查看</div>
      ) : (
        <div className="table-wrapper">
          <table className="data-table">
            <thead>
              <tr>
                <th>时间</th>
                <th>方向</th>
                <th>数量</th>
                <th>价格</th>
              </tr>
            </thead>
            <tbody>
              {tickerTrades.slice(0, 10).map((trade, index) => (
                <tr key={trade.id || `${trade.ticker}-${trade.timestamp}-${index}`}>
                  <td>{formatDateTime(trade.timestamp)}</td>
                  <td style={{ fontWeight: 700, color: trade.side === 'LONG' ? '#00C853' : trade.side === 'SHORT' ? '#FF1744' : '#000000' }}>
                    {sideLabel(trade.side)}
                  </td>
                  <td>{trade.qty}</td>
                  <td>${Number(trade.price).toFixed(2)}</td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      )}
    </div>
  );
}

View File

@@ -0,0 +1,281 @@
export function normalizeSignalDirection(signal) {
  // Collapse a free-form signal string onto 'bullish' | 'bearish' | 'neutral'.
  const text = String(signal || '').trim().toLowerCase();
  if (!text) return 'neutral';
  const bullish = text.includes('bull') || text === 'long' || text === 'buy';
  if (bullish) return 'bullish';
  const bearish = text.includes('bear') || text === 'short' || text === 'sell';
  return bearish ? 'bearish' : 'neutral';
}
/**
 * Whether `content` mentions `ticker` as a whole word (case-insensitive).
 *
 * The ticker is escaped before being interpolated into the RegExp: the
 * original code injected it raw, so a ticker like "BRK.B" would match
 * "BRKXB" ('.' matches any character) and a ticker containing other regex
 * metacharacters could throw a SyntaxError.
 *
 * @param {string} content - text to search; non-strings return false.
 * @param {string} ticker - symbol to look for; blank/falsy returns false.
 * @returns {boolean}
 */
export function includesTicker(content, ticker) {
  if (!ticker || typeof content !== 'string') return false;
  const normalized = ticker.trim().toUpperCase();
  if (!normalized) return false;
  // Escape regex metacharacters so the ticker is matched literally.
  const escaped = normalized.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return new RegExp(`\\b${escaped}\\b`, 'i').test(content);
}
export function flattenFeedMessages(feed) {
  // Flatten a mixed feed (messages, memories, conferences) into one list of
  // message-like records tagged with their feed origin. Conference entries
  // contribute each of their inner messages, annotated with the title.
  if (!Array.isArray(feed)) return [];
  const flattened = [];
  for (const entry of feed) {
    if (!entry || !entry.type || !entry.data) continue;
    if (entry.type === 'message' || entry.type === 'memory') {
      flattened.push({ ...entry.data, feedType: entry.type, feedId: entry.id });
    } else if (entry.type === 'conference' && Array.isArray(entry.data.messages)) {
      for (const message of entry.data.messages) {
        flattened.push({
          ...message,
          feedType: 'conference',
          feedId: entry.id,
          conferenceTitle: entry.data.title
        });
      }
    }
  }
  return flattened;
}
export function snippetText(content, ticker) {
  // Collapse whitespace, then return a short excerpt centered on the first
  // occurrence of `ticker`; without a ticker (or a hit) fall back to a plain
  // 220-character prefix. Ellipses mark truncation on either side.
  const text = String(content || '').replace(/\s+/g, ' ').trim();
  if (!text) return '';
  const truncate = (value) => (value.length > 220 ? `${value.slice(0, 220)}...` : value);
  const symbol = String(ticker || '').trim().toUpperCase();
  if (!symbol) return truncate(text);
  const hit = text.toUpperCase().indexOf(symbol);
  if (hit === -1) return truncate(text);
  const from = Math.max(0, hit - 90);
  const to = Math.min(text.length, hit + symbol.length + 130);
  const core = text.slice(from, to).trim();
  const prefix = from > 0 ? '...' : '';
  const suffix = to < text.length ? '...' : '';
  return `${prefix}${core}${suffix}`;
}
export function buildLinePath(points, width, height, padding) {
  // Build an SVG path string ("M x,y L x,y ...") from price points, scaling
  // prices into the chart's inner drawing area. Returns '' when there is
  // nothing drawable (no points, or no finite prices).
  if (!Array.isArray(points) || points.length === 0) {
    return '';
  }
  const finitePrices = points
    .map((point) => Number(point.price))
    .filter(Number.isFinite);
  if (finitePrices.length === 0) {
    return '';
  }
  const low = Math.min(...finitePrices);
  const high = Math.max(...finitePrices);
  // Guard against a flat series: a zero span would divide by zero.
  const range = high - low || 1;
  const plotWidth = width - padding * 2;
  const plotHeight = height - padding * 2;
  const lastIndex = Math.max(points.length - 1, 1);
  const segments = points.map((point, index) => {
    const x = padding + (plotWidth * index) / lastIndex;
    const y = height - padding - ((Number(point.price) - low) / range) * plotHeight;
    const command = index === 0 ? 'M' : 'L';
    return `${command}${x.toFixed(2)},${y.toFixed(2)}`;
  });
  return segments.join(' ');
}
export function parsePointTime(point) {
  // Resolve a point's time in epoch milliseconds: try the raw timestamp (or
  // label) first, then retry as a bare date with a midnight suffix.
  // Returns NaN when neither parse succeeds.
  const value = point?.timestamp ?? point?.label;
  if (!value) return NaN;
  const parsed = new Date(value).getTime();
  if (Number.isFinite(parsed)) return parsed;
  return new Date(`${value}T00:00:00`).getTime();
}
export function aggregatePriceSeriesToCandles(points) {
if (!Array.isArray(points) || points.length === 0) {
return [];
}
const bucketTarget = points.length >= 36 ? 12 : points.length >= 18 ? 8 : 4;
const bucketSize = Math.max(1, Math.ceil(points.length / bucketTarget));
const candles = [];
for (let index = 0; index < points.length; index += bucketSize) {
const bucket = points.slice(index, index + bucketSize);
const prices = bucket.map((point) => Number(point.price)).filter(Number.isFinite);
if (!prices.length) {
continue;
}
candles.push({
id: `${bucket[0]?.timestamp || index}-${bucket[bucket.length - 1]?.timestamp || index + bucket.length}`,
open: Number(bucket[0].price),
high: Math.max(...prices),
low: Math.min(...prices),
close: Number(bucket[bucket.length - 1].price),
startTimestamp: parsePointTime(bucket[0]),
endTimestamp: parsePointTime(bucket[bucket.length - 1]),
startLabel: bucket[0]?.label || bucket[0]?.timestamp || '',
endLabel: bucket[bucket.length - 1]?.label || bucket[bucket.length - 1]?.timestamp || ''
});
}
return candles;
}
export function eventDateKey(timestamp) {
  // Normalize a timestamp-ish value to a YYYY-MM-DD key. Parseable values go
  // through toISOString (i.e. the UTC calendar date); anything else falls
  // back to the raw value's first 10 characters. '' for falsy input.
  if (!timestamp) return '';
  const millis = new Date(timestamp).getTime();
  if (Number.isNaN(millis)) {
    return String(timestamp).slice(0, 10);
  }
  return new Date(millis).toISOString().slice(0, 10);
}
export function resolveEventCategory(event) {
  // Bucket a feed event into the category used for marker coloring/filtering.
  // Trades and mentions map directly; signal events are sub-classified by the
  // analyst role found in `meta`, defaulting to the generic 'signal'.
  if (!event) return 'other';
  if (event.type === 'trade') return 'trade';
  if (event.type === 'mention') return 'discussion';
  if (event.type !== 'signal') return 'other';
  const meta = String(event.meta || '').toLowerCase();
  const roles = ['technical', 'fundamental', 'sentiment', 'valuation', 'risk', 'portfolio'];
  const matched = roles.find((role) => meta.includes(role));
  return matched || 'signal';
}
export function normalizeTradeRow(row, fallbackIndex = 0) {
  // Coerce a raw trade record (DB row or live event) into the UI trade shape;
  // null for anything that is not an object. Numeric fields default to 0.
  if (!row || typeof row !== 'object') return null;
  const toNumber = (value) => {
    const parsed = Number(value);
    return Number.isFinite(parsed) ? parsed : 0;
  };
  const timestamp = row.timestamp || row.ts || row.created_at || null;
  const ticker = row.ticker || '';
  return {
    id: row.id || `trade-${ticker}-${timestamp || fallbackIndex}-${fallbackIndex}`,
    timestamp,
    trading_date: row.trading_date || row.trade_date || null,
    ticker,
    side: row.side || '',
    qty: toNumber(row.qty ?? row.quantity ?? 0),
    price: toNumber(row.price ?? 0)
  };
}
export function normalizeSignalRow(row, fallbackIndex = 0) {
  // Coerce a raw analyst-signal record into the UI signal shape, attaching a
  // normalized direction and a tri-state correctness flag (true/false/null).
  if (!row || typeof row !== 'object') return null;
  const timestamp = row.timestamp || row.created_at || null;
  const date = row.date || row.trade_date || eventDateKey(timestamp) || '';
  const rawSignal = row.signal || row.title || '';
  const confidence = Number(row.confidence);
  const realReturn = Number(row.real_return);
  // is_correct may arrive as a boolean or as the strings 'true'/'false';
  // anything else means "not yet evaluated" and stays null.
  let isCorrect = null;
  if (typeof row.is_correct === 'boolean') {
    isCorrect = row.is_correct;
  } else if (typeof row.is_correct === 'string') {
    const lowered = row.is_correct.toLowerCase();
    if (lowered === 'true') isCorrect = true;
    else if (lowered === 'false') isCorrect = false;
  }
  return {
    id: row.id || `signal-${row.agent_id || row.agentId || 'agent'}-${date || fallbackIndex}-${fallbackIndex}`,
    timestamp,
    date,
    ticker: row.ticker || '',
    signal: rawSignal,
    confidence: Number.isFinite(confidence) ? confidence : null,
    real_return: Number.isFinite(realReturn) ? realReturn : null,
    is_correct: isCorrect,
    agentId: row.agent_id || row.agentId || '',
    agentName: row.agent_name || row.agentName || row.meta || '未知分析师',
    role: row.role || row.meta || '',
    normalizedDirection: normalizeSignalDirection(rawSignal)
  };
}
export function normalizeMentionRow(row, fallbackIndex = 0) {
  // Coerce a raw discussion-mention record (SQLite row) into the UI mention
  // shape; null for anything that is not an object.
  if (!row || typeof row !== 'object') return null;
  const identifier = row.id || `mention-${fallbackIndex}`;
  return {
    id: identifier,
    feedId: identifier,
    timestamp: row.timestamp || null,
    agent: row.agent || row.agentName || '未知角色',
    content: row.body || row.content || '',
    conferenceTitle: row.meta || '',
    feedType: 'sqlite'
  };
}
export function normalizeNewsRow(row, fallbackIndex = 0) {
  // Coerce a raw news record (DB row or API payload) into the UI news shape;
  // null for anything that is not an object. The ret_t* return columns are
  // parsed to finite numbers or null when missing/unparseable.
  if (!row || typeof row !== 'object') return null;
  const toReturn = (value) => {
    const parsed = Number(value);
    return Number.isFinite(parsed) ? parsed : null;
  };
  const date = row.date || row.published_utc || row.timestamp || null;
  return {
    id: row.id || row.url || `news-${fallbackIndex}`,
    date,
    dateKey: eventDateKey(date),
    ticker: row.ticker || '',
    title: row.title || '未命名新闻',
    source: row.source || row.publisher || '新闻源',
    category: row.category || '',
    related: row.related || '',
    summary: row.summary || row.description || '',
    url: row.url || row.article_url || '',
    tradeDate: row.trade_date || null,
    relevance: row.relevance || '',
    sentiment: row.sentiment || '',
    keyDiscussion: row.key_discussion || '',
    reasonGrowth: row.reason_growth || '',
    reasonDecrease: row.reason_decrease || '',
    retT0: toReturn(row.ret_t0),
    retT1: toReturn(row.ret_t1),
    retT3: toReturn(row.ret_t3),
    retT5: toReturn(row.ret_t5),
    retT10: toReturn(row.ret_t10),
    analysisSource: row.analysis_source || '',
    analysisModelLabel: row.analysis_model_label || ''
  };
}
// Normalize one aggregated news-timeline bucket (per trade date) into the
// shape used for chart markers. Count-like fields coerce to a finite
// number or fall back to 0. Rows without a resolvable date are dropped.
export function normalizeNewsTimelineRow(row, fallbackIndex = 0) {
  if (!row || typeof row !== 'object') return null;
  const date = row.date || row.trade_date || null;
  if (!date) return null;
  const toCount = (value) => {
    const numeric = Number(value);
    return Number.isFinite(numeric) ? numeric : 0;
  };
  return {
    id: row.id || `news-timeline-${date}-${fallbackIndex}`,
    date,
    dateKey: eventDateKey(date),
    count: toCount(row.count ?? 0),
    sourceCount: toCount(row.source_count ?? 0),
    topTitle: row.top_title || '',
    positiveCount: toCount(row.positive_count),
    negativeCount: toCount(row.negative_count),
    neutralCount: toCount(row.neutral_count),
    highRelevanceCount: toCount(row.high_relevance_count)
  };
}
// Display metadata (Chinese label + hex color) for each explain-event
// category filter chip. The 'all' entry is the default/unfiltered chip;
// 'other' is the catch-all bucket. Keys must match the category values
// produced by resolveEventCategory — do not rename without updating it.
export const EVENT_CATEGORY_META = {
  all: { label: '全部事件', color: '#111111' },
  discussion: { label: '讨论', color: '#555555' },
  signal: { label: '信号', color: '#0f766e' },
  technical: { label: '技术', color: '#2563eb' },
  fundamental: { label: '基本面', color: '#059669' },
  sentiment: { label: '情绪', color: '#7c3aed' },
  valuation: { label: '估值', color: '#d97706' },
  risk: { label: '风控', color: '#dc2626' },
  portfolio: { label: '组合', color: '#111827' },
  trade: { label: '成交', color: '#b91c1c' },
  other: { label: '其他', color: '#6b7280' }
};

View File

@@ -0,0 +1,664 @@
import { useMemo } from 'react';
import { formatDateTime } from '../../utils/formatters';
import {
aggregatePriceSeriesToCandles,
buildLinePath,
eventDateKey,
flattenFeedMessages,
includesTicker,
normalizeMentionRow,
normalizeNewsRow,
normalizeNewsTimelineRow,
normalizeSignalDirection,
normalizeSignalRow,
normalizeTradeRow,
parsePointTime,
resolveEventCategory,
snippetText
} from './explainUtils';
function tradeSideLabel(value) {
if (value === 'LONG') return '做多';
if (value === 'SHORT') return '做空';
return value || '交易';
}
export default function useExplainModel({
tickers,
holdings,
trades,
leaderboard,
feed,
priceHistoryByTicker,
ohlcHistoryByTicker,
selectedSymbol,
explainEventsSnapshot,
newsSnapshot,
selectedEventDate,
activeEventCategory,
activeNewsCategory,
activeNewsSentiment = 'all'
}) {
const availableSymbols = useMemo(() => (
Array.isArray(tickers)
? tickers.map((ticker) => ticker?.symbol).filter((symbol) => typeof symbol === 'string' && symbol.trim())
: []
), [tickers]);
const selectedTicker = useMemo(
() => tickers.find((ticker) => ticker.symbol === selectedSymbol) || null,
[selectedSymbol, tickers]
);
const holding = useMemo(
() => holdings.find((item) => item.ticker === selectedSymbol) || null,
[holdings, selectedSymbol]
);
const fallbackTrades = useMemo(
() => trades
.filter((trade) => trade.ticker === selectedSymbol)
.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime()),
[selectedSymbol, trades]
);
const tickerSignals = useMemo(() => {
const snapshotSignals = Array.isArray(explainEventsSnapshot?.signals)
? explainEventsSnapshot.signals.map((signal, index) => normalizeSignalRow(signal, index)).filter(Boolean)
: [];
if (snapshotSignals.length > 0) {
return snapshotSignals.sort((a, b) => new Date(b.timestamp || b.date).getTime() - new Date(a.timestamp || a.date).getTime());
}
if (!selectedSymbol) return [];
return (Array.isArray(leaderboard) ? leaderboard : []).flatMap((agent) => {
const signals = Array.isArray(agent.signals) ? agent.signals : [];
return signals
.filter((signal) => signal.ticker === selectedSymbol)
.map((signal) => ({
agentId: agent.agentId,
agentName: agent.name,
role: agent.role,
...signal,
normalizedDirection: normalizeSignalDirection(signal.signal)
}));
}).sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
}, [explainEventsSnapshot, leaderboard, selectedSymbol]);
const signalSummary = useMemo(() => {
const summary = { bullish: 0, bearish: 0, neutral: 0 };
tickerSignals.forEach((signal) => {
summary[signal.normalizedDirection] += 1;
});
return summary;
}, [tickerSignals]);
const fallbackRecentMentions = useMemo(() => {
const flattened = flattenFeedMessages(feed);
return flattened
.filter((message) => message.agent !== 'System' && includesTicker(message.content, selectedSymbol))
.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
.slice(0, 8);
}, [feed, selectedSymbol]);
const tickerTrades = useMemo(() => {
const snapshotTrades = Array.isArray(explainEventsSnapshot?.trades)
? explainEventsSnapshot.trades.map((trade, index) => normalizeTradeRow(trade, index)).filter(Boolean)
: [];
if (snapshotTrades.length > 0) {
return snapshotTrades.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
}
return fallbackTrades;
}, [explainEventsSnapshot, fallbackTrades]);
const recentMentions = useMemo(() => {
const snapshotMentions = Array.isArray(explainEventsSnapshot?.events)
? explainEventsSnapshot.events
.map((event, index) => normalizeMentionRow(event, index))
.filter(Boolean)
.slice(0, 8)
: [];
if (snapshotMentions.length > 0) {
return snapshotMentions.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
}
return fallbackRecentMentions;
}, [explainEventsSnapshot, fallbackRecentMentions]);
const tickerNews = useMemo(() => {
const items = Array.isArray(newsSnapshot?.items)
? newsSnapshot.items.map((item, index) => normalizeNewsRow(item, index)).filter(Boolean)
: [];
return items.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
}, [newsSnapshot]);
const dateScopedNews = useMemo(() => {
if (!selectedEventDate || !newsSnapshot?.byDate || typeof newsSnapshot.byDate !== 'object') {
return [];
}
const rows = Array.isArray(newsSnapshot.byDate[selectedEventDate])
? newsSnapshot.byDate[selectedEventDate]
: [];
return rows.map((item, index) => normalizeNewsRow(item, index)).filter(Boolean);
}, [newsSnapshot, selectedEventDate]);
const visibleNews = useMemo(() => {
if (!selectedEventDate) {
return tickerNews;
}
if (dateScopedNews.length > 0) {
return dateScopedNews.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
}
const scoped = tickerNews.filter((item) => item.dateKey === selectedEventDate);
return scoped.length > 0 ? scoped : tickerNews;
}, [dateScopedNews, selectedEventDate, tickerNews]);
const tickerNewsTimeline = useMemo(() => {
const items = Array.isArray(newsSnapshot?.timeline)
? newsSnapshot.timeline.map((item, index) => normalizeNewsTimelineRow(item, index)).filter(Boolean)
: [];
return items.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
}, [newsSnapshot]);
const newsCategories = useMemo(() => (
newsSnapshot?.categories && typeof newsSnapshot.categories === 'object'
? newsSnapshot.categories
: {}
), [newsSnapshot]);
const visibleNewsByCategory = useMemo(() => {
let scopedNews = visibleNews;
if (activeNewsCategory !== 'all') {
const categoryMeta = newsCategories?.[activeNewsCategory];
const allowedIds = Array.isArray(categoryMeta?.article_ids)
? new Set(categoryMeta.article_ids)
: null;
if (allowedIds && allowedIds.size > 0) {
scopedNews = scopedNews.filter((item) => allowedIds.has(item.id));
}
}
if (activeNewsSentiment === 'all') {
return scopedNews;
}
return scopedNews.filter((item) => {
const sentiment = String(item.sentiment || '').trim().toLowerCase() || 'neutral';
return sentiment === activeNewsSentiment;
});
}, [activeNewsCategory, activeNewsSentiment, newsCategories, visibleNews]);
const selectedRangeWindow = useMemo(() => {
if (!selectedEventDate) return null;
const endDate = new Date(`${selectedEventDate}T00:00:00`);
if (Number.isNaN(endDate.getTime())) return null;
const startDate = new Date(endDate);
startDate.setDate(startDate.getDate() - 6);
return {
startDate: startDate.toISOString().slice(0, 10),
endDate: selectedEventDate
};
}, [selectedEventDate]);
const selectedRangeExplain = useMemo(() => {
if (!selectedRangeWindow) return null;
const key = `${selectedRangeWindow.startDate}:${selectedRangeWindow.endDate}`;
return newsSnapshot?.rangeExplainCache?.[key] || null;
}, [newsSnapshot, selectedRangeWindow]);
const selectedStory = useMemo(() => {
const storyCache = newsSnapshot?.storyCache;
if (!storyCache || typeof storyCache !== 'object') {
return null;
}
const keys = Object.keys(storyCache).sort();
if (!keys.length) {
return null;
}
return storyCache[keys[keys.length - 1]] || null;
}, [newsSnapshot]);
const selectedSimilarDays = useMemo(() => {
if (!selectedEventDate) {
return null;
}
const similarCache = newsSnapshot?.similarDaysCache;
if (!similarCache || typeof similarCache !== 'object') {
return null;
}
return similarCache[selectedEventDate] || null;
}, [newsSnapshot, selectedEventDate]);
const latestSignal = tickerSignals[0] || null;
const priceColor = selectedTicker?.change > 0 ? '#00C853' : selectedTicker?.change < 0 ? '#FF1744' : '#000000';
const exposureWeight = holding && Number.isFinite(Number(holding.weight)) ? Number(holding.weight) * 100 : null;
const recentTrade = tickerTrades[0] || null;
const ohlcSeries = useMemo(() => {
const raw = ohlcHistoryByTicker?.[selectedSymbol];
return Array.isArray(raw) ? raw.filter((candle) => Number.isFinite(Number(candle.close))).slice(-60) : [];
}, [ohlcHistoryByTicker, selectedSymbol]);
const priceSeries = useMemo(() => {
const raw = priceHistoryByTicker?.[selectedSymbol];
return Array.isArray(raw) ? raw.filter((point) => Number.isFinite(Number(point.price))).slice(-60) : [];
}, [priceHistoryByTicker, selectedSymbol]);
const explainSummary = useMemo(() => {
if (!selectedSymbol) return [];
const lines = [];
if (latestSignal) {
const directionText = latestSignal.normalizedDirection === 'bullish'
? '偏多'
: latestSignal.normalizedDirection === 'bearish'
? '偏空'
: '观望';
lines.push(`最新分析师结论为${directionText},来自${latestSignal.agentName}`);
} else {
lines.push('当前还没有形成结构化分析师信号,更多依赖讨论内容和持仓状态。');
}
if (holding) {
lines.push(`组合当前持有 ${selectedSymbol},权重约 ${exposureWeight != null ? `${exposureWeight.toFixed(2)}%` : '0.00%'}`);
} else {
lines.push(`组合当前未持有 ${selectedSymbol},仍处于观察阶段。`);
}
if (recentTrade) {
lines.push(`最近一次相关交易为${tradeSideLabel(recentTrade.side)},时间是 ${formatDateTime(recentTrade.timestamp)}`);
}
if (recentMentions.length > 0) {
lines.push(`最近讨论中共有 ${recentMentions.length} 条直接提及 ${selectedSymbol} 的观点。`);
}
return lines;
}, [exposureWeight, holding, latestSignal, recentMentions.length, recentTrade, selectedSymbol]);
const explainTimeline = useMemo(() => {
const signalEvents = tickerSignals.slice(0, 12).map((signal, index) => ({
id: `signal-${signal.agentId}-${signal.date}-${index}`,
type: 'signal',
timestamp: new Date(`${signal.date}T08:00:00`).toISOString(),
title: `${signal.agentName} 给出${signal.normalizedDirection === 'bullish' ? '看涨' : signal.normalizedDirection === 'bearish' ? '看跌' : '中性'}信号`,
meta: signal.role,
body: typeof signal.real_return === 'number'
? `后验收益 ${signal.real_return >= 0 ? '+' : ''}${(signal.real_return * 100).toFixed(2)}%`
: '该信号暂未完成后验评估',
tone: signal.normalizedDirection === 'bullish' ? 'positive' : signal.normalizedDirection === 'bearish' ? 'negative' : 'neutral'
}));
const mentionEvents = recentMentions.slice(0, 12).map((message, index) => ({
id: `mention-${message.feedId || message.id}-${index}`,
type: 'mention',
timestamp: message.timestamp,
title: `${message.agent || '未知角色'}${message.conferenceTitle || '讨论流'}中提及 ${selectedSymbol}`,
meta: message.conferenceTitle || (message.feedType === 'conference' ? '投资讨论' : '即时消息'),
body: snippetText(message.content, selectedSymbol),
tone: 'neutral'
}));
const tradeEvents = tickerTrades.slice(0, 12).map((trade, index) => ({
id: `trade-${trade.id || `${trade.ticker}-${trade.timestamp}-${index}`}`,
type: 'trade',
timestamp: trade.timestamp,
title: `${tradeSideLabel(trade.side)} ${trade.qty}`,
meta: '交易执行',
body: `成交价 $${Number(trade.price).toFixed(2)}`,
tone: trade.side === 'LONG' ? 'positive' : trade.side === 'SHORT' ? 'negative' : 'neutral'
}));
const fallbackTimeline = [...signalEvents, ...mentionEvents, ...tradeEvents]
.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
.slice(0, 24)
.map((event) => ({
...event,
dateKey: eventDateKey(event.timestamp),
category: resolveEventCategory(event)
}));
if (!explainEventsSnapshot) {
return fallbackTimeline;
}
const dbSignalEvents = (Array.isArray(explainEventsSnapshot.signals) ? explainEventsSnapshot.signals : [])
.map((signal, index) => {
if (signal?.type === 'signal' && signal?.timestamp) {
return signal;
}
const normalized = normalizeSignalRow(signal, index);
if (!normalized) return null;
return {
id: normalized.id,
type: 'signal',
timestamp: normalized.timestamp || (normalized.date ? new Date(`${normalized.date}T08:00:00`).toISOString() : null),
title: `${normalized.agentName} 给出${
normalized.normalizedDirection === 'bullish'
? '看涨'
: normalized.normalizedDirection === 'bearish'
? '看跌'
: '中性'
}信号`,
meta: normalized.role,
body: typeof normalized.real_return === 'number'
? `后验收益 ${normalized.real_return >= 0 ? '+' : ''}${(normalized.real_return * 100).toFixed(2)}%`
: '该信号暂未完成后验评估',
tone: normalized.normalizedDirection === 'bullish'
? 'positive'
: normalized.normalizedDirection === 'bearish'
? 'negative'
: 'neutral'
};
})
.filter(Boolean);
const dbMentionEvents = (Array.isArray(explainEventsSnapshot.events) ? explainEventsSnapshot.events : [])
.map((event, index) => {
if (event?.type === 'mention' && event?.timestamp) {
return event;
}
const normalized = normalizeMentionRow(event, index);
if (!normalized) return null;
return {
id: normalized.id,
type: 'mention',
timestamp: normalized.timestamp,
title: `${normalized.agent || '未知角色'}${normalized.conferenceTitle || '讨论流'}中提及 ${selectedSymbol}`,
meta: normalized.conferenceTitle || (normalized.feedType === 'conference' ? '投资讨论' : '即时消息'),
body: snippetText(normalized.content, selectedSymbol),
tone: 'neutral'
};
})
.filter(Boolean);
const dbTradeEvents = (Array.isArray(explainEventsSnapshot.trades) ? explainEventsSnapshot.trades : [])
.map((trade, index) => {
if (trade?.type === 'trade' && trade?.timestamp) {
return trade;
}
const normalized = normalizeTradeRow(trade, index);
if (!normalized) return null;
return {
id: normalized.id,
type: 'trade',
timestamp: normalized.timestamp,
title: `${tradeSideLabel(normalized.side)} ${normalized.qty}`,
meta: '交易执行',
body: `成交价 $${Number(normalized.price).toFixed(2)}`,
tone: normalized.side === 'LONG' ? 'positive' : normalized.side === 'SHORT' ? 'negative' : 'neutral'
};
})
.filter(Boolean);
const dbEvents = [
...dbSignalEvents,
...dbMentionEvents,
...dbTradeEvents
]
.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
.slice(0, 24)
.map((event) => ({
...event,
dateKey: eventDateKey(event.timestamp),
category: resolveEventCategory(event)
}));
return dbEvents.length > 0 ? dbEvents : fallbackTimeline;
}, [explainEventsSnapshot, recentMentions, selectedSymbol, tickerSignals, tickerTrades]);
const availableEventDates = useMemo(
() => Array.from(new Set(explainTimeline.map((event) => event.dateKey).filter(Boolean))),
[explainTimeline]
);
const eventCategoryCounts = useMemo(() => {
const scopedEvents = selectedEventDate
? explainTimeline.filter((event) => event.dateKey === selectedEventDate)
: explainTimeline;
const counts = { all: scopedEvents.length };
scopedEvents.forEach((event) => {
counts[event.category] = (counts[event.category] || 0) + 1;
});
return counts;
}, [explainTimeline, selectedEventDate]);
const visibleExplainEvents = useMemo(() => explainTimeline.filter((event) => {
if (selectedEventDate && event.dateKey !== selectedEventDate) {
return false;
}
if (activeEventCategory !== 'all' && event.category !== activeEventCategory) {
return false;
}
return true;
}), [activeEventCategory, explainTimeline, selectedEventDate]);
const chartModel = useMemo(() => {
const width = 720;
const height = 220;
const padding = 18;
if (!ohlcSeries.length && !priceSeries.length) {
return {
width,
height,
padding,
path: '',
minPrice: null,
maxPrice: null,
markers: [],
candles: [],
linePoints: [],
bucketCount: 0
};
}
if (ohlcSeries.length > 1) {
const prices = ohlcSeries.flatMap((candle) => [Number(candle.low), Number(candle.high)]);
const minPrice = Math.min(...prices);
const maxPrice = Math.max(...prices);
const span = maxPrice - minPrice || 1;
const innerWidth = width - padding * 2;
const innerHeight = height - padding * 2;
const candleWidth = Math.max(8, Math.min(18, (innerWidth / ohlcSeries.length) * 0.55));
const startTime = parsePointTime({ timestamp: ohlcSeries[0]?.time });
const endTime = parsePointTime({ timestamp: ohlcSeries[ohlcSeries.length - 1]?.time });
const timeSpan = Math.max(endTime - startTime, 1);
const candles = ohlcSeries.map((candle, index) => {
const centerX = padding + ((index + 0.5) * innerWidth) / Math.max(ohlcSeries.length, 1);
const openY = height - padding - ((Number(candle.open) - minPrice) / span) * innerHeight;
const closeY = height - padding - ((Number(candle.close) - minPrice) / span) * innerHeight;
const highY = height - padding - ((Number(candle.high) - minPrice) / span) * innerHeight;
const lowY = height - padding - ((Number(candle.low) - minPrice) / span) * innerHeight;
return {
...candle,
id: `${candle.time || index}`,
centerX,
x: centerX - candleWidth / 2,
width: candleWidth,
openY,
closeY,
highY,
lowY,
bodyY: Math.min(openY, closeY),
bodyHeight: Math.max(Math.abs(closeY - openY), 2)
};
});
const explainMarkers = explainTimeline.slice(0, 8).map((event) => {
const timestamp = new Date(event.timestamp).getTime();
if (!Number.isFinite(timestamp)) return null;
const ratio = Math.min(1, Math.max(0, (timestamp - startTime) / timeSpan));
const nearestCandleIndex = candles.length <= 1
? 0
: Math.min(candles.length - 1, Math.max(0, Math.round(ratio * Math.max(candles.length - 1, 1))));
const nearestCandle = candles[nearestCandleIndex] || null;
const x = nearestCandle ? nearestCandle.centerX : padding + ratio * innerWidth;
const price = nearestCandle ? Number(nearestCandle.close) : Number(ohlcSeries[ohlcSeries.length - 1]?.close ?? maxPrice);
const y = height - padding - ((price - minPrice) / span) * innerHeight;
return { ...event, x, y, isSelected: event.dateKey === selectedEventDate, markerType: 'event' };
}).filter(Boolean);
const newsMarkers = tickerNewsTimeline.slice(-20).map((item, index) => {
const timestamp = new Date(`${item.date}T12:00:00`).getTime();
if (!Number.isFinite(timestamp)) return null;
const ratio = Math.min(1, Math.max(0, (timestamp - startTime) / timeSpan));
const nearestCandleIndex = candles.length <= 1
? 0
: Math.min(candles.length - 1, Math.max(0, Math.round(ratio * Math.max(candles.length - 1, 1))));
const nearestCandle = candles[nearestCandleIndex] || null;
const x = nearestCandle ? nearestCandle.centerX : padding + ratio * innerWidth;
const price = nearestCandle ? Number(nearestCandle.close) : Number(ohlcSeries[ohlcSeries.length - 1]?.close ?? maxPrice);
const y = height - padding - ((price - minPrice) / span) * innerHeight;
return {
id: item.id || `news-marker-${index}`,
title: item.topTitle || `当日 ${item.count} 条新闻`,
dateKey: item.dateKey,
tone: 'news',
x,
y,
isSelected: item.dateKey === selectedEventDate,
markerType: 'news',
count: item.count
};
}).filter(Boolean);
return {
width,
height,
padding,
path: '',
minPrice,
maxPrice,
markers: [...newsMarkers, ...explainMarkers],
candles,
linePoints: [],
bucketCount: candles.length
};
}
const prices = priceSeries.map((point) => Number(point.price));
const minPrice = Math.min(...prices);
const maxPrice = Math.max(...prices);
const span = maxPrice - minPrice || 1;
const innerWidth = width - padding * 2;
const innerHeight = height - padding * 2;
const startTime = parsePointTime(priceSeries[0]);
const endTime = parsePointTime(priceSeries[priceSeries.length - 1]);
const timeSpan = Math.max(endTime - startTime, 1);
const candles = aggregatePriceSeriesToCandles(priceSeries);
const linePoints = priceSeries.map((point, index) => {
const x = padding + (innerWidth * index) / Math.max(priceSeries.length - 1, 1);
const y = height - padding - ((Number(point.price) - minPrice) / span) * innerHeight;
return { x, y };
});
const candleWidth = candles.length > 1
? Math.max(8, Math.min(24, (innerWidth / candles.length) * 0.58))
: 14;
const mappedCandles = candles.map((candle, index) => {
const centerX = padding + ((index + 0.5) * innerWidth) / Math.max(candles.length, 1);
const openY = height - padding - ((candle.open - minPrice) / span) * innerHeight;
const closeY = height - padding - ((candle.close - minPrice) / span) * innerHeight;
const highY = height - padding - ((candle.high - minPrice) / span) * innerHeight;
const lowY = height - padding - ((candle.low - minPrice) / span) * innerHeight;
return {
...candle,
centerX,
x: centerX - candleWidth / 2,
width: candleWidth,
openY,
closeY,
highY,
lowY,
bodyY: Math.min(openY, closeY),
bodyHeight: Math.max(Math.abs(closeY - openY), 2)
};
});
const explainMarkers = explainTimeline.slice(0, 8).map((event) => {
const timestamp = new Date(event.timestamp).getTime();
if (!Number.isFinite(timestamp)) return null;
const ratio = Math.min(1, Math.max(0, (timestamp - startTime) / timeSpan));
const nearestCandleIndex = mappedCandles.length <= 1
? 0
: Math.min(
mappedCandles.length - 1,
Math.max(0, Math.round(ratio * Math.max(mappedCandles.length - 1, 1)))
);
const nearestCandle = mappedCandles[nearestCandleIndex] || null;
const x = nearestCandle ? nearestCandle.centerX : padding + ratio * innerWidth;
const price = nearestCandle ? nearestCandle.close : Number(priceSeries[priceSeries.length - 1]?.price ?? prices[prices.length - 1]);
const y = height - padding - ((price - minPrice) / span) * innerHeight;
return { ...event, x, y, isSelected: event.dateKey === selectedEventDate, markerType: 'event' };
}).filter(Boolean);
const newsMarkers = tickerNewsTimeline.slice(-20).map((item, index) => {
const timestamp = new Date(`${item.date}T12:00:00`).getTime();
if (!Number.isFinite(timestamp)) return null;
const ratio = Math.min(1, Math.max(0, (timestamp - startTime) / timeSpan));
const nearestCandleIndex = mappedCandles.length <= 1
? 0
: Math.min(
mappedCandles.length - 1,
Math.max(0, Math.round(ratio * Math.max(mappedCandles.length - 1, 1)))
);
const nearestCandle = mappedCandles[nearestCandleIndex] || null;
const x = nearestCandle ? nearestCandle.centerX : padding + ratio * innerWidth;
const price = nearestCandle ? nearestCandle.close : Number(priceSeries[priceSeries.length - 1]?.price ?? prices[prices.length - 1]);
const y = height - padding - ((price - minPrice) / span) * innerHeight;
return {
id: item.id || `news-marker-${index}`,
title: item.topTitle || `当日 ${item.count} 条新闻`,
dateKey: item.dateKey,
tone: 'news',
x,
y,
isSelected: item.dateKey === selectedEventDate,
markerType: 'news',
count: item.count
};
}).filter(Boolean);
return {
width,
height,
padding,
path: buildLinePath(priceSeries, width, height, padding),
minPrice,
maxPrice,
markers: [...newsMarkers, ...explainMarkers],
candles: mappedCandles,
linePoints,
bucketCount: mappedCandles.length
};
}, [explainTimeline, ohlcSeries, priceSeries, selectedEventDate, tickerNewsTimeline]);
return {
availableSymbols,
selectedTicker,
holding,
tickerSignals,
signalSummary,
tickerTrades,
recentMentions,
tickerNews,
visibleNews,
newsCategories,
visibleNewsByCategory,
selectedRangeWindow,
selectedRangeExplain,
selectedStory,
selectedSimilarDays,
latestSignal,
priceColor,
exposureWeight,
recentTrade,
ohlcSeries,
priceSeries,
explainSummary,
explainTimeline,
availableEventDates,
eventCategoryCounts,
visibleExplainEvents,
chartModel
};
}

View File

@@ -0,0 +1,156 @@
import React from 'react';
import { describe, expect, it } from 'vitest';
import { renderHook } from '@testing-library/react';
import useExplainModel from './useExplainModel';
// Shared fixture: a complete prop set for useExplainModel with one AAPL
// ticker, a holding, snapshot signals/events/trades, and a news snapshot
// (items, timeline, categories, range-explain and similar-days caches).
// Tests mutate fields on a fresh copy returned by each call.
function buildBaseProps() {
  return {
    tickers: [{ symbol: 'AAPL', price: 105.12, change: 1.34 }],
    holdings: [{ ticker: 'AAPL', quantity: 10, weight: 0.2, marketValue: 1051.2, currentPrice: 105.12 }],
    trades: [],
    leaderboard: [],
    feed: [],
    priceHistoryByTicker: {
      AAPL: [
        { timestamp: '2026-03-08T10:00:00Z', price: 100 },
        { timestamp: '2026-03-09T10:00:00Z', price: 103 },
        { timestamp: '2026-03-10T10:00:00Z', price: 105 }
      ]
    },
    ohlcHistoryByTicker: {},
    selectedSymbol: 'AAPL',
    // DB-backed snapshot: one signal, one mention event, one trade.
    explainEventsSnapshot: {
      signals: [
        {
          id: 'sig-1',
          ticker: 'AAPL',
          date: '2026-03-10',
          signal: 'bullish',
          confidence: 0.88,
          agent_id: 'agent-1',
          agent_name: 'Alpha',
          role: 'technical'
        }
      ],
      events: [
        {
          id: 'mention-1',
          timestamp: '2026-03-10T12:00:00Z',
          agent: 'Research',
          body: 'AAPL momentum remains strong after earnings.',
          meta: 'morning note'
        }
      ],
      trades: [
        {
          id: 'trade-1',
          timestamp: '2026-03-10T15:00:00Z',
          ticker: 'AAPL',
          side: 'LONG',
          qty: 5,
          price: 104.5
        }
      ]
    },
    // News snapshot: two articles (one positive, one negative) plus the
    // aggregates and caches the hook reads.
    newsSnapshot: {
      items: [
        {
          id: 'news-1',
          ticker: 'AAPL',
          date: '2026-03-10T09:00:00Z',
          title: 'Apple earnings beat expectations',
          summary: 'Revenue topped consensus estimates.',
          source: 'Polygon',
          sentiment: 'positive'
        },
        {
          id: 'news-2',
          ticker: 'AAPL',
          date: '2026-03-09T09:00:00Z',
          title: 'Supplier update',
          summary: 'Supply chain improves.',
          source: 'Polygon',
          sentiment: 'negative'
        }
      ],
      timeline: [
        { id: 'timeline-1', date: '2026-03-09', count: 1, source_count: 1, top_title: 'Supplier update' },
        { id: 'timeline-2', date: '2026-03-10', count: 1, source_count: 1, top_title: 'Apple earnings beat expectations' }
      ],
      categories: {
        earnings: {
          count: 1,
          article_ids: ['news-1']
        }
      },
      // Keyed by "startDate:endDate" (the 7-day window ending on the
      // selected date).
      rangeExplainCache: {
        '2026-03-03:2026-03-10': {
          summary: '区间内主要由财报催化推动。'
        }
      },
      similarDaysCache: {
        '2026-03-10': {
          target_features: {
            sentiment_score: 0.5,
            n_articles: 2
          },
          items: [
            {
              date: '2026-02-18',
              score: 0.92,
              n_articles: 2,
              sentiment_score: 0.4
            }
          ]
        }
      }
    },
    selectedEventDate: '2026-03-10',
    activeEventCategory: 'all',
    activeNewsCategory: 'earnings',
    activeNewsSentiment: 'all'
  };
}
describe('useExplainModel', () => {
  // Selected date + 'earnings' category collapses visible news to the one
  // matching article, and both the range-explain and similar-days caches
  // resolve for the selected window/date.
  it('derives visible news and range explain data from snapshots', () => {
    const { result } = renderHook(() => useExplainModel(buildBaseProps()));
    expect(result.current.availableSymbols).toEqual(['AAPL']);
    expect(result.current.visibleNews).toHaveLength(1);
    expect(result.current.visibleNewsByCategory).toHaveLength(1);
    expect(result.current.visibleNewsByCategory[0].id).toBe('news-1');
    expect(result.current.selectedRangeWindow).toEqual({
      startDate: '2026-03-03',
      endDate: '2026-03-10'
    });
    expect(result.current.selectedRangeExplain).toEqual({
      summary: '区间内主要由财报催化推动。'
    });
    expect(result.current.selectedSimilarDays?.items).toHaveLength(1);
  });
  // One snapshot signal + one mention + one trade => three timeline events
  // with the expected category counts, plus chart markers and a line path
  // (price-only fixture, so the SVG path starts with an 'M' move command).
  it('builds timeline, counts, and chart markers from explain data', () => {
    const { result } = renderHook(() => useExplainModel(buildBaseProps()));
    expect(result.current.availableEventDates).toContain('2026-03-10');
    expect(result.current.eventCategoryCounts.all).toBe(3);
    expect(result.current.eventCategoryCounts.technical).toBe(1);
    expect(result.current.eventCategoryCounts.discussion).toBe(1);
    expect(result.current.eventCategoryCounts.trade).toBe(1);
    expect(result.current.visibleExplainEvents).toHaveLength(3);
    expect(result.current.chartModel.markers.length).toBeGreaterThan(0);
    expect(result.current.chartModel.path).toMatch(/^M/);
  });
  // Sentiment filter alone (category 'all') keeps only the positive article.
  it('filters visible news by sentiment when requested', () => {
    const props = buildBaseProps();
    props.activeNewsCategory = 'all';
    props.activeNewsSentiment = 'positive';
    const { result } = renderHook(() => useExplainModel(props));
    expect(result.current.visibleNewsByCategory).toHaveLength(1);
    expect(result.current.visibleNewsByCategory[0].id).toBe('news-1');
  });
});

View File

@@ -8,9 +8,11 @@ export default defineConfig({
allowedHosts: ["localhost", "trading.evoagents.cn","www.evoagents.cn"]
},
plugins: [react(), tsconfigPaths(),tailwindcss()],
test: {
environment: "jsdom"
},
preview: {
host: "0.0.0.0",
port: 4173
},
});

View File

@@ -66,7 +66,8 @@ evotraders = "backend.cli:app"
[tool.setuptools]
packages = ["backend", "backend.agents", "backend.config",
"backend.data", "backend.llm",
"backend.tools", "backend.utils", "backend.services"]
"backend.tools", "backend.utils", "backend.services",
"backend.explain", "backend.enrich"]
[tool.ruff]
line-length = 88
@@ -77,4 +78,4 @@ testpaths = ["backend/tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
asyncio_default_fixture_loop_scope = "function"

View File

@@ -1,12 +1,7 @@
---
tickers:
- AAPL
- MSFT
- GOOGL
- NVDA
- TSLA
- META
- AMZN
- AAPL
- MSFT
initial_cash: 100000
margin_requirement: 0.0
enable_memory: false

File diff suppressed because one or more lines are too long