Add explain analysis workflow and UI

This commit is contained in:
2026-03-16 22:28:41 +08:00
parent 3a5558b576
commit 1f5ee3698e
49 changed files with 8888 additions and 1476 deletions

236
backend/tests/test_cli.py Normal file
View File

@@ -0,0 +1,236 @@
# -*- coding: utf-8 -*-
from pathlib import Path
from backend import cli
def test_live_runs_incremental_market_store_update_before_start(monkeypatch, tmp_path):
    """`cli.live` must run the incremental market-store refresh before launching backend.main."""
    root = tmp_path
    (root / ".env").write_text("FINNHUB_API_KEY=test\n", encoding="utf-8")
    recorded = []

    monkeypatch.setattr(cli, "get_project_root", lambda: root)
    monkeypatch.setattr(cli, "handle_history_cleanup", lambda config_name, auto_clean=False: None)
    monkeypatch.setattr(
        cli,
        "run_data_updater",
        lambda project_root: recorded.append(("run_data_updater", project_root)),
    )
    monkeypatch.setattr(
        cli,
        "auto_update_market_store",
        lambda config_name, end_date=None: recorded.append(
            ("auto_update_market_store", config_name, end_date)
        ),
    )
    monkeypatch.setattr(
        cli,
        "auto_enrich_market_store",
        lambda config_name, end_date=None, lookback_days=120, force=False: recorded.append(
            ("auto_enrich_market_store", config_name, end_date, lookback_days, force)
        ),
    )
    monkeypatch.setattr(cli.os, "chdir", lambda path: recorded.append(("chdir", Path(path))))

    def fake_run(cmd, check=True, **kwargs):
        recorded.append(("subprocess.run", cmd, check))
        return 0

    monkeypatch.setattr(cli.subprocess, "run", fake_run)

    cli.live(
        mock=False,
        config_name="smoke_fullstack",
        host="0.0.0.0",
        port=8765,
        trigger_time="now",
        poll_interval=10,
        clean=False,
        enable_memory=False,
    )

    # Data refresh steps all happened before the subprocess launch.
    assert any(entry[0] == "run_data_updater" for entry in recorded)
    assert any(
        entry[0] == "auto_update_market_store" and entry[1] == "smoke_fullstack"
        for entry in recorded
    )
    assert any(
        entry[0] == "auto_enrich_market_store" and entry[1] == "smoke_fullstack"
        for entry in recorded
    )
    launch = next(entry for entry in recorded if entry[0] == "subprocess.run")
    assert launch[1][:6] == [
        cli.sys.executable,
        "-u",
        "-m",
        "backend.main",
        "--mode",
        "live",
    ]
def test_backtest_runs_full_market_store_prepare_before_start(monkeypatch, tmp_path):
    """`cli.backtest` must prepare the full backtest market store before launching backend.main."""
    root = tmp_path
    recorded = []

    monkeypatch.setattr(cli, "get_project_root", lambda: root)
    monkeypatch.setattr(cli, "handle_history_cleanup", lambda config_name, auto_clean=False: None)
    monkeypatch.setattr(
        cli,
        "run_data_updater",
        lambda project_root: recorded.append(("run_data_updater", project_root)),
    )
    monkeypatch.setattr(
        cli,
        "auto_prepare_backtest_market_store",
        lambda config_name, start_date, end_date: recorded.append(
            ("auto_prepare_backtest_market_store", config_name, start_date, end_date)
        ),
    )
    monkeypatch.setattr(
        cli,
        "auto_enrich_market_store",
        lambda config_name, end_date=None, lookback_days=120, force=False: recorded.append(
            ("auto_enrich_market_store", config_name, end_date, lookback_days, force)
        ),
    )
    monkeypatch.setattr(cli.os, "chdir", lambda path: recorded.append(("chdir", Path(path))))

    def fake_run(cmd, check=True, **kwargs):
        recorded.append(("subprocess.run", cmd, check))
        return 0

    monkeypatch.setattr(cli.subprocess, "run", fake_run)

    cli.backtest(
        start="2026-03-01",
        end="2026-03-10",
        config_name="smoke_fullstack",
        host="0.0.0.0",
        port=8765,
        poll_interval=10,
        clean=False,
        enable_memory=False,
    )

    assert any(entry[0] == "run_data_updater" for entry in recorded)
    # The prepare step receives the exact backtest window.
    assert any(
        entry[0] == "auto_prepare_backtest_market_store"
        and entry[1:] == ("smoke_fullstack", "2026-03-01", "2026-03-10")
        for entry in recorded
    )
    # Enrichment is bounded by the backtest end date.
    assert any(
        entry[0] == "auto_enrich_market_store"
        and entry[1] == "smoke_fullstack"
        and entry[2] == "2026-03-10"
        for entry in recorded
    )
    launch = next(entry for entry in recorded if entry[0] == "subprocess.run")
    assert launch[1][:6] == [
        cli.sys.executable,
        "-u",
        "-m",
        "backend.main",
        "--mode",
        "backtest",
    ]
def test_ingest_enrich_runs_batch_enrichment(monkeypatch):
    """`cli.ingest_enrich` resolves tickers and forwards all options to `enrich_symbols`."""
    recorded = []
    monkeypatch.setattr(
        cli, "_resolve_symbols", lambda raw_tickers, config_name=None: ["AAPL", "MSFT"]
    )

    class DummyStore:
        pass

    monkeypatch.setattr(cli, "MarketStore", lambda: DummyStore())

    def fake_enrich_symbols(
        store,
        symbols,
        start_date=None,
        end_date=None,
        limit=200,
        analysis_source="local",
        skip_existing=True,
    ):
        recorded.append(
            ("enrich_symbols", symbols, start_date, end_date, limit, analysis_source, skip_existing)
        )
        return [
            {
                "symbol": symbol,
                "news_count": 3,
                "queued_count": 3,
                "analyzed": 3,
                "skipped_existing_count": 0,
                "deduped_count": 0,
                "llm_count": 0,
                "local_count": 3,
            }
            for symbol in symbols
        ]

    monkeypatch.setattr(cli, "enrich_symbols", fake_enrich_symbols)

    cli.ingest_enrich(
        tickers=None,
        start="2026-03-01",
        end="2026-03-10",
        limit=150,
        force=False,
        config_name="smoke_fullstack",
    )

    assert recorded == [
        ("enrich_symbols", ["AAPL", "MSFT"], "2026-03-01", "2026-03-10", 150, "local", True)
    ]
def test_ingest_report_reads_market_store_report(monkeypatch):
    """`cli.ingest_report` reads the store's enrich report and renders a captioned table."""
    recorded = []
    rendered = []
    monkeypatch.setattr(cli, "_resolve_symbols", lambda raw_tickers, config_name=None: ["AAPL"])

    class DummyStore:
        def get_enrich_report(self, symbols=None, start_date=None, end_date=None):
            recorded.append(("get_enrich_report", symbols, start_date, end_date))
            return [
                {
                    "symbol": "AAPL",
                    "raw_news_count": 10,
                    "analyzed_news_count": 8,
                    "coverage_pct": 80.0,
                    "llm_count": 5,
                    "local_count": 3,
                    "latest_trade_date": "2026-03-16",
                    "latest_analysis_at": "2026-03-16T09:00:00",
                }
            ]

    monkeypatch.setattr(cli, "MarketStore", lambda: DummyStore())
    monkeypatch.setattr(
        cli,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )
    monkeypatch.setattr(cli, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(cli.console, "print", lambda value: rendered.append(value))

    cli.ingest_report(
        tickers=None,
        start="2026-03-01",
        end="2026-03-16",
        config_name="smoke_fullstack",
        only_problematic=False,
    )

    assert recorded == [("get_enrich_report", ["AAPL"], "2026-03-01", "2026-03-16")]
    assert rendered
    # The rendered table advertises which explain model produced LLM rows.
    assert getattr(rendered[0], "caption", "") == "Explain LLM: DASHSCOPE:qwen-max"
def test_filter_problematic_report_rows_keeps_low_coverage_and_no_llm():
    """Rows with full coverage AND at least one LLM analysis are filtered out."""
    rows = [
        {"symbol": "AAPL", "coverage_pct": 100.0, "llm_count": 2},   # healthy -> dropped
        {"symbol": "MSFT", "coverage_pct": 80.0, "llm_count": 1},    # low coverage -> kept
        {"symbol": "NVDA", "coverage_pct": 100.0, "llm_count": 0},   # no LLM rows -> kept
    ]

    kept = cli._filter_problematic_report_rows(rows)

    assert [entry["symbol"] for entry in kept] == ["MSFT", "NVDA"]

View File

@@ -0,0 +1,384 @@
# -*- coding: utf-8 -*-
import json
from types import SimpleNamespace
import pytest
from backend.services.gateway import Gateway
import backend.services.gateway as gateway_module
class DummyWebSocket:
    """Fake websocket that records every sent payload as a decoded dict."""

    def __init__(self):
        self.messages = []

    async def send(self, payload: str):
        # Decode eagerly so tests can assert on dicts instead of raw JSON strings.
        self.messages.append(json.loads(payload))
class DummyStateSync:
    """Minimal state-sync stand-in: fixed current date, collects system messages."""

    def __init__(self, current_date="2026-03-16"):
        self.state = {"current_date": current_date}
        self.system_messages = []

    def set_broadcast_fn(self, _fn):
        # Broadcasting is irrelevant for these tests.
        return None

    def update_state(self, *_args, **_kwargs):
        # State updates are ignored; tests only read the initial snapshot.
        return None

    async def on_system_message(self, message):
        self.system_messages.append(message)
class FakeMarketStore:
    """In-memory MarketStore double.

    Every method appends a tuple describing the call to ``self.calls`` so tests
    can assert on exactly which store APIs the gateway used, then returns a
    small canned payload.
    """

    def __init__(self):
        # Chronological record of (method_name, *args) tuples.
        self.calls = []

    def get_news_timeline_enriched(self, symbol, *, start_date=None, end_date=None):
        self.calls.append(("get_news_timeline_enriched", symbol, start_date, end_date))
        return [{"date": end_date, "count": 2, "source_count": 1, "top_title": "Top", "positive_count": 1}]

    def get_news_items(self, symbol, *, start_date=None, end_date=None, limit=100):
        self.calls.append(("get_news_items", symbol, start_date, end_date, limit))
        return [
            {
                "id": "news-1",
                "ticker": symbol,
                "date": end_date,
                "trade_date": end_date,
                "title": "Title",
                "summary": "Summary",
                "source": "polygon",
            }
        ]

    def get_news_items_enriched(self, symbol, *, start_date=None, end_date=None, trade_date=None, limit=100):
        self.calls.append(("get_news_items_enriched", symbol, start_date, end_date, trade_date, limit))
        # Mirror the real store: a trade_date lookup takes precedence over a range.
        target_date = trade_date or end_date
        return [
            {
                "id": "news-1",
                "ticker": symbol,
                "date": target_date,
                "trade_date": target_date,
                "title": "Title",
                "summary": "Summary",
                "source": "polygon",
                "sentiment": "negative",
                "relevance": "high",
                "key_discussion": "Key discussion",
            }
        ]

    def get_news_by_ids_enriched(self, symbol, article_ids):
        # Materialize once: callers may pass a generator, and indexing after
        # list() would otherwise read from an already-exhausted iterator.
        ids = list(article_ids)
        self.calls.append(("get_news_by_ids_enriched", symbol, ids))
        return [{"id": ids[0], "ticker": symbol, "date": "2026-03-16", "sentiment": "negative"}]

    def get_news_categories_enriched(self, symbol, *, start_date=None, end_date=None, limit=200):
        self.calls.append(("get_news_categories_enriched", symbol, start_date, end_date, limit))
        return {"macro": {"label": "宏观", "count": 1, "article_ids": ["news-1"], "positive_ids": [], "negative_ids": ["news-1"], "neutral_ids": []}}

    def get_story_cache(self, symbol, *, as_of_date):
        self.calls.append(("get_story_cache", symbol, as_of_date))
        # None forces the gateway to (re)build the story.
        return None

    def upsert_story_cache(self, symbol, *, as_of_date, content, source="local"):
        self.calls.append(("upsert_story_cache", symbol, as_of_date, source))

    def delete_story_cache(self, symbol, *, as_of_date=None):
        self.calls.append(("delete_story_cache", symbol, as_of_date))
        return 1

    def get_similar_day_cache(self, symbol, *, target_date):
        self.calls.append(("get_similar_day_cache", symbol, target_date))
        return None

    def upsert_similar_day_cache(self, symbol, *, target_date, payload, source="local"):
        self.calls.append(("upsert_similar_day_cache", symbol, target_date, source))

    def delete_similar_day_cache(self, symbol, *, target_date=None):
        self.calls.append(("delete_similar_day_cache", symbol, target_date))
        return 1

    def get_ohlc(self, symbol, start_date, end_date):
        self.calls.append(("get_ohlc", symbol, start_date, end_date))
        return [
            {"date": start_date, "open": 100, "high": 105, "low": 99, "close": 103},
            {"date": end_date, "open": 103, "high": 108, "low": 102, "close": 107},
        ]
def make_gateway(market_store=None):
    """Build a Gateway wired to lightweight fakes (live mode, dummy state sync)."""
    store = market_store if market_store is not None else FakeMarketStore()
    storage = SimpleNamespace(market_store=store)
    return Gateway(
        market_service=SimpleNamespace(),
        storage_service=storage,
        pipeline=SimpleNamespace(state_sync=None),
        state_sync=DummyStateSync(),
        config={"mode": "live"},
    )
@pytest.mark.asyncio
async def test_handle_get_stock_news_timeline_uses_market_store_symbol_argument():
    """The timeline handler queries the store with the requested ticker and window."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    await gateway._handle_get_stock_news_timeline(ws, {"ticker": "AAPL", "lookback_days": 30})

    # 30 days back from the state's current date (2026-03-16).
    assert store.calls == [("get_news_timeline_enriched", "AAPL", "2026-02-14", "2026-03-16")]
    reply = ws.messages[-1]
    assert reply["type"] == "stock_news_timeline_loaded"
    assert reply["ticker"] == "AAPL"
@pytest.mark.asyncio
async def test_handle_get_stock_news_categories_uses_market_store_symbol_argument(monkeypatch):
    """The categories handler loads enriched items first, then the category map."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    await gateway._handle_get_stock_news_categories(ws, {"ticker": "AAPL", "lookback_days": 30})

    assert store.calls == [
        ("get_news_items_enriched", "AAPL", "2026-02-14", "2026-03-16", None, 200),
        ("get_news_categories_enriched", "AAPL", "2026-02-14", "2026-03-16", 200),
    ]
    reply = ws.messages[-1]
    assert reply["type"] == "stock_news_categories_loaded"
    assert reply["categories"]["macro"]["count"] == 1
@pytest.mark.asyncio
async def test_handle_get_stock_range_explain_uses_market_store_rows(monkeypatch):
    """Range explain feeds the store's enriched rows into build_range_explanation."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    def fake_build_range_explanation(*, ticker, start_date, end_date, news_rows):
        # Echo the inputs so we can assert they were forwarded unchanged.
        return {
            "ticker": ticker,
            "start_date": start_date,
            "end_date": end_date,
            "news_count": len(news_rows),
        }

    monkeypatch.setattr(gateway_module, "build_range_explanation", fake_build_range_explanation)

    await gateway._handle_get_stock_range_explain(
        ws,
        {"ticker": "AAPL", "start_date": "2026-03-10", "end_date": "2026-03-16"},
    )

    assert store.calls == [
        ("get_news_items_enriched", "AAPL", "2026-03-10", "2026-03-16", None, 100)
    ]
    assert ws.messages[-1] == {
        "type": "stock_range_explain_loaded",
        "ticker": "AAPL",
        "result": {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "news_count": 1,
        },
    }
@pytest.mark.asyncio
async def test_handle_get_stock_range_explain_uses_article_ids_path(monkeypatch):
    """When article_ids are supplied, the handler bypasses the date-range lookup."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    monkeypatch.setattr(
        gateway_module,
        "build_range_explanation",
        lambda **kwargs: {"news_count": len(kwargs["news_rows"])},
    )

    await gateway._handle_get_stock_range_explain(
        ws,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "article_ids": ["news-99"],
        },
    )

    # Only the by-ids lookup ran — no range query.
    assert store.calls == [("get_news_by_ids_enriched", "AAPL", ["news-99"])]
    assert ws.messages[-1]["result"]["news_count"] == 1
@pytest.mark.asyncio
async def test_handle_get_stock_news_for_date_uses_trade_date_lookup():
    """A single-date request uses the trade_date parameter, not a date range."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    await gateway._handle_get_stock_news_for_date(
        ws,
        {"ticker": "AAPL", "date": "2026-03-16", "limit": 10},
    )

    assert store.calls == [
        ("get_news_items_enriched", "AAPL", None, None, "2026-03-16", 10)
    ]
    reply = ws.messages[-1]
    assert reply["type"] == "stock_news_for_date_loaded"
    assert reply["date"] == "2026-03-16"
@pytest.mark.asyncio
async def test_handle_get_stock_story_returns_story_payload(monkeypatch):
    """The story handler enriches on demand and returns a story mentioning the ticker."""
    gateway = make_gateway(FakeMarketStore())
    ws = DummyWebSocket()

    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 3},
    )

    await gateway._handle_get_stock_story(ws, {"ticker": "AAPL", "as_of_date": "2026-03-16"})

    reply = ws.messages[-1]
    assert reply["type"] == "stock_story_loaded"
    assert reply["ticker"] == "AAPL"
    assert "AAPL Story" in reply["story"]
@pytest.mark.asyncio
async def test_handle_get_stock_similar_days_returns_items(monkeypatch):
    """The similar-days handler responds with a list payload for the ticker."""
    gateway = make_gateway(FakeMarketStore())
    ws = DummyWebSocket()

    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 3},
    )

    await gateway._handle_get_stock_similar_days(
        ws,
        {"ticker": "AAPL", "date": "2026-03-16", "top_k": 5},
    )

    reply = ws.messages[-1]
    assert reply["type"] == "stock_similar_days_loaded"
    assert reply["ticker"] == "AAPL"
    assert isinstance(reply["items"], list)
@pytest.mark.asyncio
async def test_handle_run_stock_enrich_rebuilds_caches(monkeypatch):
    """A forced enrich with rebuild flags clears the story and similar-day caches."""
    store = FakeMarketStore()
    gateway = make_gateway(store)
    ws = DummyWebSocket()

    monkeypatch.setattr(
        gateway_module,
        "enrich_news_for_symbol",
        lambda *args, **kwargs: {"symbol": "AAPL", "analyzed": 2, "queued_count": 2},
    )

    await gateway._handle_run_stock_enrich(
        ws,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "force": True,
            "rebuild_story": True,
            "rebuild_similar_days": True,
            "story_date": "2026-03-16",
            "target_date": "2026-03-16",
        },
    )

    assert ("delete_story_cache", "AAPL", "2026-03-16") in store.calls
    assert ("delete_similar_day_cache", "AAPL", "2026-03-16") in store.calls
    reply = ws.messages[-1]
    assert reply["type"] == "stock_enrich_completed"
    assert reply["stats"]["analyzed"] == 2
@pytest.mark.asyncio
async def test_handle_run_stock_enrich_rejects_local_to_llm_without_llm(monkeypatch):
    """Requesting a local→LLM upgrade fails cleanly when LLM enrichment is disabled."""
    gateway = make_gateway(FakeMarketStore())
    ws = DummyWebSocket()

    monkeypatch.setattr(gateway_module, "llm_enrichment_enabled", lambda: False)

    await gateway._handle_run_stock_enrich(
        ws,
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "only_local_to_llm": True,
        },
    )

    reply = ws.messages[-1]
    assert reply["type"] == "stock_enrich_completed"
    assert "requires EXPLAIN_ENRICH_USE_LLM=true" in reply["error"]
def test_schedule_watchlist_market_store_refresh_creates_task(monkeypatch):
    """Scheduling a watchlist refresh spawns the background refresh coroutine."""
    gateway = make_gateway()
    captured = {}

    class DummyTask:
        def done(self):
            return False

        def cancel(self):
            captured["cancelled"] = True

    def fake_create_task(coro):
        # Record which coroutine was scheduled, then close it to avoid
        # "never awaited" warnings.
        captured["coro_name"] = coro.cr_code.co_name
        coro.close()
        return DummyTask()

    monkeypatch.setattr(gateway_module.asyncio, "create_task", fake_create_task)

    gateway._schedule_watchlist_market_store_refresh(["AAPL", "MSFT"])

    assert captured["coro_name"] == "_refresh_market_store_for_watchlist"
@pytest.mark.asyncio
async def test_refresh_market_store_for_watchlist_emits_system_messages(monkeypatch):
    """The refresh coroutine announces start and completion via system messages."""
    gateway = make_gateway()

    monkeypatch.setattr(
        gateway_module,
        "ingest_symbols",
        lambda symbols, mode="incremental": [
            {"symbol": symbol, "prices": 3, "news": 4, "aligned": 4}
            for symbol in symbols
        ],
    )

    await gateway._refresh_market_store_for_watchlist(["AAPL", "MSFT"])

    messages = gateway.state_sync.system_messages
    assert messages[0] == "正在同步自选股市场数据: AAPL, MSFT"
    assert "自选股市场数据已同步:" in messages[1]
    assert "AAPL prices=3 news=4" in messages[1]

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
from unittest.mock import patch
import pandas as pd
from backend.data.historical_price_manager import HistoricalPriceManager
def test_preload_data_prefers_market_db():
    """When the market DB has rows for a symbol, the CSV fallback must not be touched."""
    manager = HistoricalPriceManager()
    manager.subscribe(["AAPL"])

    db_rows = [
        {
            "symbol": "AAPL",
            "date": "2026-03-09",
            "open": 100.0,
            "high": 103.0,
            "low": 99.0,
            "close": 102.0,
            "volume": 10_000,
            "vwap": 101.0,
            "transactions": 500,
            "source": "polygon",
        }
    ]

    with (
        patch.object(manager._market_store, "get_ohlc", return_value=db_rows),
        patch.object(manager._router, "load_local_price_frame") as load_csv,
    ):
        manager.preload_data("2026-03-01", "2026-03-10")

    load_csv.assert_not_called()
    assert "AAPL" in manager._price_cache
    assert float(manager._price_cache["AAPL"].iloc[0]["close"]) == 102.0
def test_preload_data_falls_back_to_csv():
    """With no market-DB rows, preload must fall back to the local CSV frame."""
    manager = HistoricalPriceManager()
    manager.subscribe(["MSFT"])

    frame = pd.DataFrame(
        {
            "time": ["2026-03-09"],
            "open": [200.0],
            "high": [205.0],
            "low": [198.0],
            "close": [204.0],
            "volume": [20_000],
        }
    )
    frame["time"] = pd.to_datetime(frame["time"])
    # Index by date, matching the CSV loader's frame shape.
    frame["Date"] = frame["time"]
    frame.set_index("Date", inplace=True)

    with (
        patch.object(manager._market_store, "get_ohlc", return_value=[]),
        patch.object(manager._router, "load_local_price_frame", return_value=frame) as load_csv,
    ):
        manager.preload_data("2026-03-01", "2026-03-10")

    load_csv.assert_called_once_with("MSFT")
    assert "MSFT" in manager._price_cache
    assert float(manager._price_cache["MSFT"].iloc[0]["close"]) == 204.0

View File

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
from backend.enrich import llm_enricher
class DummyResponse:
    """Bare model response carrying only the structured `metadata` payload."""

    def __init__(self, metadata):
        self.metadata = metadata
class DummyModel:
    """Async model double: records each invocation, always answers with fixed metadata."""

    def __init__(self, metadata):
        self.metadata = metadata
        self.calls = []

    async def __call__(self, messages, structured_model=None, **kwargs):
        call = {
            "messages": messages,
            "structured_model": structured_model,
            "kwargs": kwargs,
        }
        self.calls.append(call)
        return DummyResponse(self.metadata)
def test_analyze_news_row_with_llm_uses_agentscope_model(monkeypatch):
    """Single-row analysis calls the explain model with the EnrichedNewsItem schema."""
    model = DummyModel(
        {
            "id": "news-1",
            "relevance": "high",
            "sentiment": "positive",
            "key_discussion": "Demand remains resilient",
            "summary": "Structured summary",
            "reason_growth": "Orders improved",
            "reason_decrease": "",
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )

    result = llm_enricher.analyze_news_row_with_llm(
        {
            "id": "news-1",
            "title": "Apple expands AI features",
            "summary": "New devices and software updates were announced.",
        }
    )

    assert result["sentiment"] == "positive"
    assert result["summary"] == "Structured summary"
    # The model label is stamped into the raw payload for traceability.
    assert result["raw_json"]["model_label"] == "DASHSCOPE:qwen-max"
    assert model.calls
    assert model.calls[0]["structured_model"] is llm_enricher.EnrichedNewsItem
def test_analyze_news_rows_with_llm_uses_agentscope_structured_batch(monkeypatch):
    """Batch analysis calls the explain model with the EnrichedNewsBatch schema."""
    model = DummyModel(
        {
            "items": [
                {
                    "id": "news-1",
                    "relevance": "high",
                    "sentiment": "negative",
                    "key_discussion": "Margin pressure",
                    "summary": "Batch summary",
                    "reason_growth": "",
                    "reason_decrease": "Costs rose",
                }
            ]
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )

    result = llm_enricher.analyze_news_rows_with_llm(
        [
            {
                "id": "news-1",
                "title": "Apple margins pressured",
                "summary": "Costs increased this quarter.",
            }
        ]
    )

    entry = result["news-1"]
    assert entry["sentiment"] == "negative"
    assert entry["reason_decrease"] == "Costs rose"
    assert entry["raw_json"]["model_label"] == "DASHSCOPE:qwen-max"
    assert model.calls
    assert model.calls[0]["structured_model"] is llm_enricher.EnrichedNewsBatch
def test_analyze_range_with_llm_uses_agentscope_structured_output(monkeypatch):
    """Range analysis calls the explain model with the RangeAnalysisPayload schema."""
    model = DummyModel(
        {
            "summary": "该股在区间内震荡下行,相关新闻主要集中在盈利预期和供应链扰动。",
            "trend_analysis": "前半段受利空新闻压制,后半段跌幅收敛。",
            "bullish_factors": ["估值消化后出现部分承接"],
            "bearish_factors": ["盈利预期下修", "供应链扰动持续"],
        }
    )
    monkeypatch.setattr(llm_enricher, "llm_range_analysis_enabled", lambda: True)
    monkeypatch.setattr(llm_enricher, "_get_explain_model", lambda: model)
    monkeypatch.setattr(
        llm_enricher,
        "get_explain_model_info",
        lambda: {"provider": "DASHSCOPE", "model_name": "qwen-max", "label": "DASHSCOPE:qwen-max"},
    )

    result = llm_enricher.analyze_range_with_llm(
        {
            "ticker": "AAPL",
            "start_date": "2026-03-10",
            "end_date": "2026-03-16",
            "price_change_pct": -3.42,
        }
    )

    assert result["summary"].startswith("该股在区间内震荡下行")
    assert result["model_label"] == "DASHSCOPE:qwen-max"
    assert result["bearish_factors"] == ["盈利预期下修", "供应链扰动持续"]
    assert model.calls
    assert model.calls[0]["structured_model"] is llm_enricher.RangeAnalysisPayload

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
from pathlib import Path
from backend.data.market_store import MarketStore
def test_get_enrich_report_summarizes_coverage(tmp_path: Path):
    """With two news rows and one LLM analysis, the report shows 50% coverage."""
    store = MarketStore(tmp_path / "market_research.db")

    store.upsert_news(
        "AAPL",
        [
            {
                "id": "news-1",
                "published_utc": "2026-03-10T12:00:00Z",
                "title": "Apple earnings beat",
                "summary": "Revenue topped expectations",
                "tickers": ["AAPL"],
            },
            {
                "id": "news-2",
                "published_utc": "2026-03-11T12:00:00Z",
                "title": "Apple supply chain warning",
                "summary": "Outlook softened",
                "tickers": ["AAPL"],
            },
        ],
    )
    store.set_trade_dates(
        [
            {"news_id": "news-1", "symbol": "AAPL", "trade_date": "2026-03-10"},
            {"news_id": "news-2", "symbol": "AAPL", "trade_date": "2026-03-11"},
        ]
    )
    # Only one of the two articles gets an (LLM) analysis row.
    store.upsert_news_analysis(
        "AAPL",
        [
            {
                "news_id": "news-1",
                "trade_date": "2026-03-10",
                "summary": "LLM enriched",
                "analysis_source": "llm",
            }
        ],
        analysis_source="llm",
    )

    rows = store.get_enrich_report(["AAPL"])

    assert len(rows) == 1
    report = rows[0]
    assert report["symbol"] == "AAPL"
    assert report["raw_news_count"] == 2
    assert report["analyzed_news_count"] == 1
    assert report["coverage_pct"] == 50.0
    assert report["llm_count"] == 1

View File

@@ -0,0 +1,174 @@
# -*- coding: utf-8 -*-
from backend.enrich import news_enricher
def test_classify_news_row_falls_back_to_local_rules(monkeypatch):
    """When the LLM path yields nothing, local rule-based classification kicks in."""
    monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)

    result = news_enricher.classify_news_row(
        {
            "title": "Apple shares drop after weak guidance",
            "summary": "Investors reacted negatively to softer-than-expected outlook.",
        }
    )

    assert result["analysis_source"] == "local"
    assert result["sentiment"] == "negative"
    assert result["summary"]
def test_classify_news_row_prefers_llm_when_available(monkeypatch):
    """A successful LLM analysis takes precedence over local rules."""
    monkeypatch.setattr(
        news_enricher,
        "analyze_news_row_with_llm",
        lambda row: {
            "relevance": "high",
            "sentiment": "positive",
            "key_discussion": "Demand resilience",
            "summary": "LLM summary",
            "reason_growth": "Orders remain strong",
            "reason_decrease": "",
            "raw_json": {"provider": "llm"},
        },
    )

    result = news_enricher.classify_news_row(
        {
            "title": "Apple expands AI features",
            "summary": "New devices and software updates were announced.",
        }
    )

    assert result["analysis_source"] == "llm"
    assert result["sentiment"] == "positive"
    assert result["summary"] == "LLM summary"
def test_build_analysis_rows_prefers_batch_llm_and_dedupes(monkeypatch):
    """Identical title/summary rows are deduped; the batch LLM result is preferred.

    Fix vs. earlier draft: unpack the `(rows, stats)` return tuple directly
    instead of binding it to `rows` and then rebinding via `rows, stats = rows`,
    which was confusing and shadowed the tuple.
    """
    monkeypatch.setattr(news_enricher, "llm_enrichment_enabled", lambda: True)
    monkeypatch.setattr(news_enricher, "get_env_int", lambda key, default=0: 8)
    monkeypatch.setattr(
        news_enricher,
        "analyze_news_rows_with_llm",
        lambda rows: {
            "news-1": {
                "relevance": "high",
                "sentiment": "positive",
                "key_discussion": "Batch result",
                "summary": "Batch summary",
                "reason_growth": "Growth",
                "reason_decrease": "",
                "raw_json": {"provider": "batch"},
            }
        },
    )
    # The single-row fallback must not be consulted when the batch path succeeds.
    monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)

    rows, stats = news_enricher.build_analysis_rows(
        symbol="AAPL",
        news_rows=[
            {"id": "news-1", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
            {"id": "news-2", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
        ],
        ohlc_rows=[],
    )

    assert len(rows) == 1
    assert rows[0]["analysis_source"] == "llm"
    assert rows[0]["summary"] == "Batch summary"
    assert stats["deduped_count"] == 1
    assert stats["llm_count"] == 1
def test_enrich_news_for_symbol_skips_existing(monkeypatch):
    """Articles with an existing analysis are skipped; only new ones are queued."""

    class DummyStore:
        def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
            return [
                {"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
                {"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
            ]

        def get_analyzed_news_ids(self, symbol, start_date=None, end_date=None):
            # news-1 was analyzed previously.
            return {"news-1"}

        def get_ohlc(self, symbol, start_date, end_date):
            return []

        def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
            self.rows = rows
            return len(rows)

    def fake_build_analysis_rows(symbol, news_rows, ohlc_rows):
        built = [
            {
                "news_id": item["id"],
                "trade_date": item["trade_date"],
                "summary": item["summary"],
                "analysis_source": "local",
            }
            for item in news_rows
        ]
        return built, {"deduped_count": 0, "llm_count": 0, "local_count": len(news_rows)}

    monkeypatch.setattr(news_enricher, "build_analysis_rows", fake_build_analysis_rows)

    store = DummyStore()
    result = news_enricher.enrich_news_for_symbol(store, "AAPL")

    assert result["news_count"] == 2
    assert result["queued_count"] == 1
    assert result["skipped_existing_count"] == 1
    assert len(store.rows) == 1
    assert store.rows[0]["news_id"] == "news-2"
def test_enrich_news_for_symbol_only_reanalyzes_local(monkeypatch):
    """only_reanalyze_local upgrades local rows to LLM and skips the rest."""

    class DummyStore:
        def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
            return [
                {"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
                {"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
                {"id": "news-3", "trade_date": "2026-03-12", "title": "Three", "summary": "Three"},
            ]

        def get_analyzed_news_sources(self, symbol, start_date=None, end_date=None):
            # news-1 is local (upgrade candidate); news-2 already LLM; news-3 unanalyzed.
            return {"news-1": "local", "news-2": "llm"}

        def get_ohlc(self, symbol, start_date, end_date):
            return []

        def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
            self.rows = rows
            return len(rows)

    def fake_build_analysis_rows(symbol, news_rows, ohlc_rows):
        built = [
            {
                "news_id": item["id"],
                "trade_date": item["trade_date"],
                "summary": item["summary"],
                "analysis_source": "llm" if item["id"] == "news-1" else "local",
            }
            for item in news_rows
        ]
        return built, {"deduped_count": 0, "llm_count": 1, "local_count": 0}

    monkeypatch.setattr(news_enricher, "build_analysis_rows", fake_build_analysis_rows)

    store = DummyStore()
    result = news_enricher.enrich_news_for_symbol(
        store,
        "AAPL",
        only_reanalyze_local=True,
    )

    assert result["news_count"] == 3
    assert result["queued_count"] == 1
    assert result["skipped_existing_count"] == 2
    assert result["only_reanalyze_local"] is True
    assert result["upgraded_local_to_llm_count"] == 1
    summary = result["execution_summary"]
    assert summary["upgraded_dates"] == ["2026-03-10"]
    assert summary["remaining_local_titles"] == []
    assert summary["skipped_missing_analysis_count"] == 1
    assert summary["skipped_non_local_count"] == 1
    assert [row["news_id"] for row in store.rows] == ["news-1"]

View File

@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
from types import SimpleNamespace
from backend.explain import range_explainer
def test_build_range_explanation_prefers_llm_text_when_available(monkeypatch):
    """When the LLM range analysis succeeds, its text wins and is tagged as llm-sourced."""
    monkeypatch.setattr(
        range_explainer,
        "get_prices",
        lambda ticker, start_date, end_date: [
            SimpleNamespace(open=100, close=98, high=102, low=97, volume=1000),
            SimpleNamespace(open=98, close=96, high=99, low=95, volume=1100),
            SimpleNamespace(open=96, close=97, high=98, low=94, volume=1200),
        ],
    )
    monkeypatch.setattr(
        range_explainer,
        "analyze_range_with_llm",
        lambda payload: {
            "summary": "区间内整体偏弱,主题集中在盈利预期和供应链风险。",
            "trend_analysis": "前半段快速下探,后半段出现修复。",
            "bullish_factors": ["回调后出现承接"],
            "bearish_factors": ["盈利预期承压"],
            "model_label": "DASHSCOPE:qwen-max",
        },
    )

    result = range_explainer.build_range_explanation(
        ticker="AAPL",
        start_date="2026-03-10",
        end_date="2026-03-16",
        news_rows=[
            {
                "id": "news-1",
                "trade_date": "2026-03-10",
                "title": "Apple margin pressure concerns grow",
                "summary": "Investors focused on weaker margin outlook.",
                "sentiment": "negative",
                "relevance": "high",
                "ret_t0": -0.02,
                "reason_decrease": "盈利预期承压",
                "category": "earnings",
            }
        ],
    )

    analysis = result["analysis"]
    assert analysis["summary"] == "区间内整体偏弱,主题集中在盈利预期和供应链风险。"
    assert analysis["trend_analysis"] == "前半段快速下探,后半段出现修复。"
    assert analysis["bullish_factors"] == ["回调后出现承接"]
    assert analysis["analysis_source"] == "llm"
    assert analysis["analysis_model_label"] == "DASHSCOPE:qwen-max"
    assert result["news_count"] == 1