feat: 微服务架构拆分和前后端优化
后端: - 拆分出 agent_service, runtime_service, trading_service, news_service - Gateway 模块化拆分 (gateway_*.py) - 添加 domains/ 领域层 - 新增 control_client, runtime_client - 更新 start-dev.sh 支持 split 服务模式 前端: - 完善 API 服务层 (newsApi, tradingApi) - 更新 vite.config.js - Explain 组件优化 测试: - 添加多个服务 app 测试 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
171
backend/tests/test_news_domain.py
Normal file
171
backend/tests/test_news_domain.py
Normal file
@@ -0,0 +1,171 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for the news domain helpers."""
|
||||
|
||||
from backend.domains import news as news_domain
|
||||
|
||||
|
||||
class _FakeStore:
|
||||
def __init__(self):
|
||||
self.calls = []
|
||||
|
||||
def get_ticker_watermarks(self, symbol):
|
||||
self.calls.append(("get_ticker_watermarks", symbol))
|
||||
return {"symbol": symbol, "last_news_fetch": "2026-03-10"}
|
||||
|
||||
def get_news_items_enriched(self, ticker, start_date=None, end_date=None, trade_date=None, limit=100):
|
||||
self.calls.append(("get_news_items_enriched", ticker, start_date, end_date, trade_date, limit))
|
||||
target = trade_date or end_date
|
||||
return [{"id": "n1", "ticker": ticker, "date": target, "trade_date": target}]
|
||||
|
||||
def get_news_timeline_enriched(self, ticker, start_date=None, end_date=None):
|
||||
self.calls.append(("get_news_timeline_enriched", ticker, start_date, end_date))
|
||||
return [{"date": end_date, "count": 1}]
|
||||
|
||||
def get_news_categories_enriched(self, ticker, start_date=None, end_date=None, limit=200):
|
||||
self.calls.append(("get_news_categories_enriched", ticker, start_date, end_date, limit))
|
||||
return {"macro": {"count": 1}}
|
||||
|
||||
def get_news_by_ids_enriched(self, ticker, article_ids):
|
||||
self.calls.append(("get_news_by_ids_enriched", ticker, list(article_ids)))
|
||||
return [{"id": article_ids[0], "ticker": ticker, "date": "2026-03-16"}]
|
||||
|
||||
|
||||
def test_news_rows_need_enrichment_detects_missing_fields():
    """Empty result sets and rows with blank fields require enrichment; populated rows do not."""
    empty_rows = []
    blank_rows = [{"sentiment": "", "relevance": "", "key_discussion": ""}]
    populated_rows = [{"sentiment": "positive"}]

    assert news_domain.news_rows_need_enrichment(empty_rows) is True
    assert news_domain.news_rows_need_enrichment(blank_rows) is True
    assert news_domain.news_rows_need_enrichment(populated_rows) is False
|
||||
|
||||
|
||||
def test_ensure_news_fresh_triggers_incremental_refresh_when_watermark_is_stale(monkeypatch):
    """When the store's watermark is behind the target date, one incremental refresh must run."""
    store = _FakeStore()  # watermark is canned at 2026-03-10, behind the target below
    refresh_calls = []

    def _fake_incremental_update(symbol, end_date=None, store=None):
        refresh_calls.append((symbol, end_date))

    monkeypatch.setattr(news_domain, "update_ticker_incremental", _fake_incremental_update)

    payload = news_domain.ensure_news_fresh(store, ticker="AAPL", target_date="2026-03-16")

    # Exactly one refresh, for the requested symbol and date.
    assert refresh_calls == [("AAPL", "2026-03-16")]
    assert payload["target_date"] == "2026-03-16"
    assert payload["refreshed"] is True
|
||||
|
||||
|
||||
def test_ensure_news_fresh_skips_refresh_when_watermark_is_current(monkeypatch):
    """When the watermark already matches the target date, no refresh should happen."""
    store = _FakeStore()
    refresh_calls = []

    # Override the canned (stale) watermark so it equals the target date.
    monkeypatch.setattr(
        store,
        "get_ticker_watermarks",
        lambda symbol: {"symbol": symbol, "last_news_fetch": "2026-03-16"},
    )

    def _fake_incremental_update(symbol, end_date=None, store=None):
        refresh_calls.append((symbol, end_date))

    monkeypatch.setattr(news_domain, "update_ticker_incremental", _fake_incremental_update)

    payload = news_domain.ensure_news_fresh(store, ticker="AAPL", target_date="2026-03-16")

    # No refresh was triggered and the payload reports that.
    assert refresh_calls == []
    assert payload["refreshed"] is False
|
||||
|
||||
|
||||
def test_get_enriched_news_returns_rows_without_enrichment_when_present(monkeypatch):
    """Rows that already carry enrichment are returned as-is — only one store read occurs."""
    store = _FakeStore()

    # Pretend the stored rows are already enriched so no enrichment pass runs.
    monkeypatch.setattr(news_domain, "news_rows_need_enrichment", lambda rows: False)

    def _fake_ensure_fresh(store, ticker, target_date=None):
        return {
            "ticker": ticker,
            "target_date": target_date,
            "last_news_fetch": target_date,
            "refreshed": False,
        }

    monkeypatch.setattr(news_domain, "ensure_news_fresh", _fake_ensure_fresh)

    payload = news_domain.get_enriched_news(
        store,
        ticker="AAPL",
        start_date="2026-03-01",
        end_date="2026-03-16",
        limit=20,
    )

    assert payload["ticker"] == "AAPL"
    assert payload["news"][0]["ticker"] == "AAPL"
    # get_enriched_news may call ensure_news_fresh with or without a target date.
    assert payload["freshness"]["target_date"] is None or payload["freshness"]["target_date"] == "2026-03-16"
    # The store was hit exactly once, with the range/limit passed through.
    assert store.calls == [
        ("get_news_items_enriched", "AAPL", "2026-03-01", "2026-03-16", None, 20)
    ]
|
||||
|
||||
|
||||
def test_get_story_and_similar_days_delegate(monkeypatch):
    """Story and similar-day payload builders delegate to their helpers and attach freshness."""
    store = _FakeStore()

    def _fake_ensure_fresh(store, ticker, target_date=None):
        return {
            "ticker": ticker,
            "target_date": target_date,
            "last_news_fetch": target_date,
            "refreshed": False,
        }

    def _fake_story(store, symbol, as_of_date):
        return {"symbol": symbol, "as_of_date": as_of_date, "story": "story"}

    def _fake_similar(store, symbol, target_date, top_k):
        return {"symbol": symbol, "target_date": target_date, "items": [{"score": 0.9}]}

    monkeypatch.setattr(news_domain, "ensure_news_fresh", _fake_ensure_fresh)
    monkeypatch.setattr(news_domain, "enrich_news_for_symbol", lambda *args, **kwargs: {"analyzed": 1})
    monkeypatch.setattr(news_domain, "get_or_create_stock_story", _fake_story)
    monkeypatch.setattr(news_domain, "find_similar_days", _fake_similar)

    story = news_domain.get_story_payload(store, ticker="AAPL", as_of_date="2026-03-16")
    similar = news_domain.get_similar_days_payload(store, ticker="AAPL", date="2026-03-16", n_similar=8)

    # Both payloads carry the helper results plus a freshness section.
    assert story["story"] == "story"
    assert "freshness" in story
    assert similar["items"][0]["score"] == 0.9
    assert "freshness" in similar
|
||||
|
||||
|
||||
def test_get_range_explain_payload_uses_article_ids(monkeypatch):
    """When explicit article ids are given, rows are fetched by id rather than by date range."""
    store = _FakeStore()

    def _fake_ensure_fresh(store, ticker, target_date=None):
        return {
            "ticker": ticker,
            "target_date": target_date,
            "last_news_fetch": target_date,
            "refreshed": False,
        }

    def _fake_build_explanation(ticker, start_date, end_date, news_rows):
        return {"ticker": ticker, "count": len(news_rows)}

    monkeypatch.setattr(news_domain, "ensure_news_fresh", _fake_ensure_fresh)
    monkeypatch.setattr(news_domain, "news_rows_need_enrichment", lambda rows: False)
    monkeypatch.setattr(news_domain, "build_range_explanation", _fake_build_explanation)

    payload = news_domain.get_range_explain_payload(
        store,
        ticker="AAPL",
        start_date="2026-03-10",
        end_date="2026-03-16",
        article_ids=["news-9"],
        limit=50,
    )

    assert payload["ticker"] == "AAPL"
    assert payload["result"] == {"ticker": "AAPL", "count": 1}
    assert "freshness" in payload
    # Only the id-based fetch was used; no range query hit the store.
    assert store.calls == [("get_news_by_ids_enriched", "AAPL", ["news-9"])]
|
||||
Reference in New Issue
Block a user