# -*- coding: utf-8 -*-
from backend.enrich import news_enricher


def test_classify_news_row_falls_back_to_local_rules(monkeypatch):
|
|
monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)
|
|
result = news_enricher.classify_news_row(
|
|
{
|
|
"title": "Apple shares drop after weak guidance",
|
|
"summary": "Investors reacted negatively to softer-than-expected outlook.",
|
|
}
|
|
)
|
|
assert result["analysis_source"] == "local"
|
|
assert result["sentiment"] == "negative"
|
|
assert result["summary"]
|
|
|
|
|
|
def test_classify_news_row_prefers_llm_when_available(monkeypatch):
|
|
monkeypatch.setattr(
|
|
news_enricher,
|
|
"analyze_news_row_with_llm",
|
|
lambda row: {
|
|
"relevance": "high",
|
|
"sentiment": "positive",
|
|
"key_discussion": "Demand resilience",
|
|
"summary": "LLM summary",
|
|
"reason_growth": "Orders remain strong",
|
|
"reason_decrease": "",
|
|
"raw_json": {"provider": "llm"},
|
|
},
|
|
)
|
|
result = news_enricher.classify_news_row(
|
|
{
|
|
"title": "Apple expands AI features",
|
|
"summary": "New devices and software updates were announced.",
|
|
}
|
|
)
|
|
assert result["analysis_source"] == "llm"
|
|
assert result["sentiment"] == "positive"
|
|
assert result["summary"] == "LLM summary"
|
|
|
|
|
|
def test_build_analysis_rows_prefers_batch_llm_and_dedupes(monkeypatch):
|
|
monkeypatch.setattr(news_enricher, "llm_enrichment_enabled", lambda: True)
|
|
monkeypatch.setattr(news_enricher, "get_env_int", lambda key, default=0: 8)
|
|
monkeypatch.setattr(
|
|
news_enricher,
|
|
"analyze_news_rows_with_llm",
|
|
lambda rows: {
|
|
"news-1": {
|
|
"relevance": "high",
|
|
"sentiment": "positive",
|
|
"key_discussion": "Batch result",
|
|
"summary": "Batch summary",
|
|
"reason_growth": "Growth",
|
|
"reason_decrease": "",
|
|
"raw_json": {"provider": "batch"},
|
|
}
|
|
},
|
|
)
|
|
monkeypatch.setattr(news_enricher, "analyze_news_row_with_llm", lambda row: None)
|
|
rows = news_enricher.build_analysis_rows(
|
|
symbol="AAPL",
|
|
news_rows=[
|
|
{"id": "news-1", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
|
|
{"id": "news-2", "trade_date": "2026-03-10", "title": "Same title", "summary": "Same summary"},
|
|
],
|
|
ohlc_rows=[],
|
|
)
|
|
rows, stats = rows
|
|
assert len(rows) == 1
|
|
assert rows[0]["analysis_source"] == "llm"
|
|
assert rows[0]["summary"] == "Batch summary"
|
|
assert stats["deduped_count"] == 1
|
|
assert stats["llm_count"] == 1
|
|
|
|
|
|
def test_enrich_news_for_symbol_skips_existing(monkeypatch):
|
|
class DummyStore:
|
|
def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
|
|
return [
|
|
{"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
|
|
{"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
|
|
]
|
|
|
|
def get_analyzed_news_ids(self, symbol, start_date=None, end_date=None):
|
|
return {"news-1"}
|
|
|
|
def get_ohlc(self, symbol, start_date, end_date):
|
|
return []
|
|
|
|
def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
|
|
self.rows = rows
|
|
return len(rows)
|
|
|
|
monkeypatch.setattr(
|
|
news_enricher,
|
|
"build_analysis_rows",
|
|
lambda symbol, news_rows, ohlc_rows: (
|
|
[
|
|
{
|
|
"news_id": row["id"],
|
|
"trade_date": row["trade_date"],
|
|
"summary": row["summary"],
|
|
"analysis_source": "local",
|
|
}
|
|
for row in news_rows
|
|
],
|
|
{"deduped_count": 0, "llm_count": 0, "local_count": len(news_rows)},
|
|
),
|
|
)
|
|
store = DummyStore()
|
|
result = news_enricher.enrich_news_for_symbol(store, "AAPL")
|
|
assert result["news_count"] == 2
|
|
assert result["queued_count"] == 1
|
|
assert result["skipped_existing_count"] == 1
|
|
assert len(store.rows) == 1
|
|
assert store.rows[0]["news_id"] == "news-2"
|
|
|
|
|
|
def test_enrich_news_for_symbol_only_reanalyzes_local(monkeypatch):
|
|
class DummyStore:
|
|
def get_news_items(self, symbol, start_date=None, end_date=None, limit=200):
|
|
return [
|
|
{"id": "news-1", "trade_date": "2026-03-10", "title": "One", "summary": "One"},
|
|
{"id": "news-2", "trade_date": "2026-03-11", "title": "Two", "summary": "Two"},
|
|
{"id": "news-3", "trade_date": "2026-03-12", "title": "Three", "summary": "Three"},
|
|
]
|
|
|
|
def get_analyzed_news_sources(self, symbol, start_date=None, end_date=None):
|
|
return {"news-1": "local", "news-2": "llm"}
|
|
|
|
def get_ohlc(self, symbol, start_date, end_date):
|
|
return []
|
|
|
|
def upsert_news_analysis(self, symbol, rows, analysis_source="local"):
|
|
self.rows = rows
|
|
return len(rows)
|
|
|
|
monkeypatch.setattr(
|
|
news_enricher,
|
|
"build_analysis_rows",
|
|
lambda symbol, news_rows, ohlc_rows: (
|
|
[
|
|
{
|
|
"news_id": row["id"],
|
|
"trade_date": row["trade_date"],
|
|
"summary": row["summary"],
|
|
"analysis_source": "llm" if row["id"] == "news-1" else "local",
|
|
}
|
|
for row in news_rows
|
|
],
|
|
{"deduped_count": 0, "llm_count": 1, "local_count": 0},
|
|
),
|
|
)
|
|
|
|
store = DummyStore()
|
|
result = news_enricher.enrich_news_for_symbol(
|
|
store,
|
|
"AAPL",
|
|
only_reanalyze_local=True,
|
|
)
|
|
|
|
assert result["news_count"] == 3
|
|
assert result["queued_count"] == 1
|
|
assert result["skipped_existing_count"] == 2
|
|
assert result["only_reanalyze_local"] is True
|
|
assert result["upgraded_local_to_llm_count"] == 1
|
|
assert result["execution_summary"]["upgraded_dates"] == ["2026-03-10"]
|
|
assert result["execution_summary"]["remaining_local_titles"] == []
|
|
assert result["execution_summary"]["skipped_missing_analysis_count"] == 1
|
|
assert result["execution_summary"]["skipped_non_local_count"] == 1
|
|
assert [row["news_id"] for row in store.rows] == ["news-1"]
|