确认PokieTicker新闻库数据源

This commit is contained in:
2026-03-16 02:19:25 +08:00
parent 78f133617f
commit 564c92c0c8
182 changed files with 6436 additions and 1050 deletions

View File

@@ -7,6 +7,7 @@ Returns human-readable text format for easy LLM consumption.
"""
# flake8: noqa: E501
# pylint: disable=C0301,W0613
import ast
import json
import logging
import traceback
@@ -20,6 +21,7 @@ import pandas as pd
from agentscope.message import TextBlock
from agentscope.tool import ToolResponse
from backend.data.provider_utils import normalize_symbol
from backend.tools.data_tools import (
get_company_news,
get_financial_metrics,
@@ -53,6 +55,16 @@ def _parse_tickers(tickers: Union[str, List[str], None]) -> List[str]:
Returns:
List of stock tickers.
"""
def _sanitize(values: List[object]) -> List[str]:
    """Normalize raw ticker values into a deduplicated list of symbols.

    Each value is stringified, stripped of surrounding whitespace and
    quote characters, then passed through ``normalize_symbol``. ``None``
    entries and values that normalize to an empty symbol are dropped,
    and first-seen order is preserved.
    """
    seen: List[str] = []
    for raw in values:
        if raw is None:
            continue
        candidate = str(raw).strip().strip("\"'")
        symbol = normalize_symbol(candidate)
        if symbol and symbol not in seen:
            seen.append(symbol)
    return seen
if tickers is None:
return []
@@ -60,17 +72,22 @@ def _parse_tickers(tickers: Union[str, List[str], None]) -> List[str]:
try:
parsed = json.loads(tickers)
if isinstance(parsed, list):
return parsed
# If it's a single string, wrap in list
return [parsed]
return _sanitize(parsed)
return _sanitize([parsed])
except json.JSONDecodeError:
# If not valid JSON, treat as comma-separated string
return [t.strip() for t in tickers.split(",") if t.strip()]
try:
parsed = ast.literal_eval(tickers)
if isinstance(parsed, list):
return _sanitize(parsed)
return _sanitize([parsed])
except (SyntaxError, ValueError):
pass
return _sanitize(tickers.split(","))
if isinstance(tickers, list):
return tickers
return _sanitize(tickers)
return []
return _sanitize([tickers])
def _safe_float(value, default=0.0) -> float:
@@ -350,6 +367,7 @@ def get_financial_metrics_tool(
"""
current_date = _resolved_date(current_date)
tickers = _parse_tickers(tickers)
lines = [
f"=== Comprehensive Financial Metrics ({current_date}, {period}) ===\n",
]

View File

@@ -96,13 +96,19 @@ def get_prices(
list[Price]: List of Price objects
"""
ticker = normalize_symbol(ticker)
if not ticker:
return []
cached_sources = _router.price_sources()
for source in cached_sources:
cache_key = f"{ticker}_{start_date}_{end_date}_{source}"
if cached_data := _cache.get_prices(cache_key):
return [Price(**price) for price in cached_data]
prices, data_source = _router.get_prices(ticker, start_date, end_date)
try:
prices, data_source = _router.get_prices(ticker, start_date, end_date)
except Exception as exc:
logger.info("Price lookup failed for %s: %s", ticker, exc)
return []
if not prices:
return []
@@ -133,17 +139,23 @@ def get_financial_metrics(
list[FinancialMetrics]: List of financial metrics
"""
ticker = normalize_symbol(ticker)
if not ticker:
return []
for source in _router.api_sources():
cache_key = f"{ticker}_{period}_{end_date}_{limit}_{source}"
if cached_data := _cache.get_financial_metrics(cache_key):
return [FinancialMetrics(**metric) for metric in cached_data]
financial_metrics, data_source = _router.get_financial_metrics(
ticker=ticker,
end_date=end_date,
period=period,
limit=limit,
)
try:
financial_metrics, data_source = _router.get_financial_metrics(
ticker=ticker,
end_date=end_date,
period=period,
limit=limit,
)
except Exception as exc:
logger.info("Financial metrics lookup failed for %s: %s", ticker, exc)
return []
if not financial_metrics:
return []
@@ -169,6 +181,8 @@ def search_line_items(
"""
try:
ticker = normalize_symbol(ticker)
if not ticker:
return []
return _router.search_line_items(
ticker=ticker,
line_items=line_items,
@@ -190,6 +204,8 @@ def get_insider_trades(
) -> list[InsiderTrade]:
"""Fetch insider trades from cache or API."""
ticker = normalize_symbol(ticker)
if not ticker:
return []
for source in _router.api_sources():
cache_key = (
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
@@ -197,12 +213,16 @@ def get_insider_trades(
if cached_data := _cache.get_insider_trades(cache_key):
return [InsiderTrade(**trade) for trade in cached_data]
all_trades, data_source = _router.get_insider_trades(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
try:
all_trades, data_source = _router.get_insider_trades(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
except Exception as exc:
logger.info("Insider trades lookup failed for %s: %s", ticker, exc)
return []
if not all_trades:
return []
@@ -219,6 +239,8 @@ def get_company_news(
) -> list[CompanyNews]:
"""Fetch company news from cache or API."""
ticker = normalize_symbol(ticker)
if not ticker:
return []
for source in _router.api_sources():
cache_key = (
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
@@ -226,12 +248,16 @@ def get_company_news(
if cached_data := _cache.get_company_news(cache_key):
return [CompanyNews(**news) for news in cached_data]
all_news, data_source = _router.get_company_news(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
try:
all_news, data_source = _router.get_company_news(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
except Exception as exc:
logger.info("Company news lookup failed for %s: %s", ticker, exc)
return []
if not all_news:
return []
@@ -243,6 +269,8 @@ def get_company_news(
def get_market_cap(ticker: str, end_date: str) -> float | None:
"""Fetch market cap from the API. Finnhub values are converted from millions."""
ticker = normalize_symbol(ticker)
if not ticker:
return None
def _metrics_lookup(symbol: str, date: str):
for source in _router.api_sources():
@@ -256,11 +284,15 @@ def get_market_cap(ticker: str, end_date: str) -> float | None:
limit=10,
)
market_cap, _ = _router.get_market_cap(
ticker=ticker,
end_date=end_date,
metrics_lookup=_metrics_lookup,
)
try:
market_cap, _ = _router.get_market_cap(
ticker=ticker,
end_date=end_date,
metrics_lookup=_metrics_lookup,
)
except Exception as exc:
logger.info("Market cap lookup failed for %s: %s", ticker, exc)
return None
return market_cap