Add configurable data providers and localize frontend UI

This commit is contained in:
2026-03-15 00:55:12 +08:00
parent 12de93aa30
commit d233a3f55d
38 changed files with 1936 additions and 1038 deletions

View File

@@ -1,29 +1,77 @@
# -*- coding: utf-8 -*-
"""
Centralized Data Source Configuration
"""Centralized data source configuration and fallback ordering."""
Auto-detects and manages data source based on available API keys.
Priority: FINNHUB_API_KEY > FINANCIAL_DATASETS_API_KEY
"""
import os
from dataclasses import dataclass
from typing import Literal, Optional
DataSource = Literal["finnhub", "financial_datasets", "yfinance", "local_csv"]
_KNOWN_SOURCES: tuple[DataSource, ...] = (
"finnhub",
"financial_datasets",
"yfinance",
"local_csv",
)
@dataclass
class DataSourceConfig:
"""Immutable data source configuration"""
"""Resolved data source configuration."""
source: DataSource
api_key: str
sources: list[DataSource]
# Module-level cache for the resolved configuration
_config_cache: Optional[DataSourceConfig] = None
def _parse_enabled_sources() -> list[DataSource]:
    """Read the optional ENABLED_DATA_SOURCES allowlist from the environment.

    Returns known source names in their configured order, de-duplicated;
    unknown or blank entries are ignored. An unset/blank variable yields [].
    """
    raw = os.getenv("ENABLED_DATA_SOURCES", "").strip().lower()
    if not raw:
        return []
    seen: list[DataSource] = []
    for token in raw.split(","):
        name = token.strip()
        if name in _KNOWN_SOURCES and name not in seen:
            seen.append(name)
    return seen
def _ordered_sources() -> list[DataSource]:
    """Resolve source preference and available fallbacks.

    Availability: keyed providers require their API key; yfinance only when
    explicitly requested; local_csv is always appended. An ENABLED_DATA_SOURCES
    allowlist narrows the result (kept in allowlist order) unless the
    intersection would be empty. The FIN_DATA_SOURCE preference, when
    available, is moved to the front.
    """
    preferred = os.getenv("FIN_DATA_SOURCE", "").strip().lower()
    allowlist = _parse_enabled_sources()
    use_yfinance = preferred == "yfinance" or "yfinance" in allowlist

    candidates: list[DataSource] = []
    if os.getenv("FINNHUB_API_KEY", "").strip():
        candidates.append("finnhub")
    if os.getenv("FINANCIAL_DATASETS_API_KEY", "").strip():
        candidates.append("financial_datasets")
    if use_yfinance:
        candidates.append("yfinance")
    candidates.append("local_csv")

    if allowlist:
        narrowed = [name for name in allowlist if name in candidates]
        if narrowed:
            candidates = narrowed

    if preferred not in candidates:
        return candidates
    return [preferred] + [name for name in candidates if name != preferred]
def _resolve_config() -> DataSourceConfig:
    """Resolve the active data source configuration.

    The primary source is the FIRST entry of the ordering produced by
    ``_ordered_sources()`` (which already honors FIN_DATA_SOURCE and
    ENABLED_DATA_SOURCES); the remaining entries stay available as
    fallbacks. Previous code checked for "finnhub" membership before the
    others, which silently overrode a non-finnhub preference.
    """
    # Environment variable holding the API key for each keyed source;
    # yfinance and local_csv need no key.
    key_env_by_source = {
        "finnhub": "FINNHUB_API_KEY",
        "financial_datasets": "FINANCIAL_DATASETS_API_KEY",
    }
    sources = _ordered_sources()
    # _ordered_sources() always includes "local_csv", so sources should be
    # non-empty; guard anyway so a future change cannot crash here.
    primary: DataSource = sources[0] if sources else "local_csv"
    key_env = key_env_by_source.get(primary)
    api_key = os.getenv(key_env, "").strip() if key_env else ""
    return DataSourceConfig(source=primary, api_key=api_key, sources=sources)
def get_config() -> DataSourceConfig:
@@ -71,6 +120,11 @@ def get_data_source() -> DataSource:
return get_config().source
def get_data_sources() -> list[DataSource]:
    """Return the resolved source ordering: primary first, then fallbacks."""
    config = get_config()
    return config.sources
def get_api_key() -> str:
    """Return the API key belonging to the primary data source ("" if none)."""
    config = get_config()
    return config.api_key

View File

@@ -1,22 +1,55 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Simple environment config helpers
"""
"""Environment config helpers with light validation and normalization."""
import os
from dataclasses import dataclass
from typing import Optional
FALSEY_ENV_VALUES = {"0", "false", "no", "off"}
PROVIDER_ALIASES = {
"openai_compatible": "OPENAI",
"openai_compat": "OPENAI",
"claude": "ANTHROPIC",
"google": "GEMINI",
"vertex": "GEMINI",
"vertexai": "GEMINI",
}
@dataclass(frozen=True)
class AgentModelConfig:
"""Resolved model config for one agent."""
model_name: str
provider: str
def _get_env_raw(key: str) -> Optional[str]:
    """Return the trimmed env value, or None when unset or blank."""
    raw = os.getenv(key)
    if raw is None:
        return None
    trimmed = raw.strip()
    return trimmed if trimmed else None
def get_env_str(key: str, default: str = "") -> str:
    """Return the trimmed env string; *default* when unset or blank."""
    raw = _get_env_raw(key)
    if raw is None:
        return default
    return raw
def get_env_list(key: str, default: Optional[list] = None) -> list:
    """Split a comma-separated env value into a list of trimmed items.

    Blank items are dropped. An unset/blank variable yields *default*
    (or [] when no default is given).
    """
    value = _get_env_raw(key)
    if not value:
        return default or []
    return [item.strip() for item in value.split(",") if item.strip()]
def get_env_float(key: str, default: float = 0.0) -> float:
"""Get float from env"""
value = os.getenv(key)
"""Get float from env."""
value = _get_env_raw(key)
if value is None:
return default
try:
@@ -26,11 +59,45 @@ def get_env_float(key: str, default: float = 0.0) -> float:
def get_env_int(key: str, default: int = 0) -> int:
    """Parse an int from env; return *default* when unset or unparsable."""
    value = _get_env_raw(key)
    if value is None:
        return default
    try:
        return int(value)
    except ValueError:
        return default
def get_env_bool(key: str, default: bool = False) -> bool:
    """Interpret an env var as a boolean flag.

    Unset/blank -> *default*; values in FALSEY_ENV_VALUES -> False;
    anything else -> True.
    """
    raw = _get_env_raw(key)
    return default if raw is None else raw.lower() not in FALSEY_ENV_VALUES
def canonicalize_model_provider(provider: Optional[str]) -> str:
    """Normalize provider labels to stable uppercase names."""
    if not provider:
        # Missing/empty labels default to the primary provider.
        return "OPENAI"
    key = provider.strip().lower().replace("-", "_")
    canonical = PROVIDER_ALIASES.get(key, key)
    return canonical.upper()
def get_agent_model_config(agent_id: str) -> AgentModelConfig:
    """Resolve model config with agent-specific override and global fallback.

    Looks up AGENT_<ID>_MODEL_NAME / AGENT_<ID>_MODEL_PROVIDER first, then
    the global MODEL_NAME / MODEL_PROVIDER defaults.
    """
    prefix = f"AGENT_{agent_id.upper().replace('-', '_')}"
    model_name = get_env_str(f"{prefix}_MODEL_NAME") or get_env_str(
        "MODEL_NAME", "gpt-4o",
    )
    provider = get_env_str(f"{prefix}_MODEL_PROVIDER") or get_env_str(
        "MODEL_PROVIDER", "OPENAI",
    )
    return AgentModelConfig(
        model_name=model_name,
        provider=canonicalize_model_provider(provider),
    )

View File

@@ -451,6 +451,7 @@ class StateSync:
"leaderboard": self._state.get("leaderboard", []),
"portfolio": self._state.get("portfolio", {}),
"realtime_prices": self._state.get("realtime_prices", {}),
"data_sources": self._state.get("data_sources", {}),
}
if include_dashboard:

View File

@@ -4,16 +4,14 @@ Historical Price Manager for backtest mode
"""
import logging
from datetime import datetime
from pathlib import Path
from typing import Callable, Dict, List, Optional
import pandas as pd
from backend.data.provider_utils import normalize_symbol
from backend.data.provider_router import get_provider_router
logger = logging.getLogger(__name__)
# Path to local CSV data directory
_DATA_DIR = Path(__file__).parent / "ret_data"
class HistoricalPriceManager:
"""Provides historical prices for backtest mode"""
@@ -27,6 +25,7 @@ class HistoricalPriceManager:
self.open_prices = {}
self.close_prices = {}
self.running = False
self._router = get_provider_router()
def subscribe(
self,
@@ -34,12 +33,14 @@ class HistoricalPriceManager:
):
"""Subscribe to symbols"""
for symbol in symbols:
symbol = normalize_symbol(symbol)
if symbol not in self.subscribed_symbols:
self.subscribed_symbols.append(symbol)
def unsubscribe(self, symbols: List[str]):
    """Unsubscribe from symbols"""
    for raw_symbol in symbols:
        canonical = normalize_symbol(raw_symbol)
        if canonical not in self.subscribed_symbols:
            continue
        self.subscribed_symbols.remove(canonical)
        # Drop any cached prices for the symbol we just removed.
        self._price_cache.pop(canonical, None)
@@ -50,19 +51,9 @@ class HistoricalPriceManager:
def _load_from_csv(self, symbol: str) -> Optional[pd.DataFrame]:
    """Load historical price data for *symbol* via the provider router.

    Returns None (rather than an empty frame) when no local data exists
    or loading fails, so callers can treat "no data" uniformly. The
    previous inline CSV-reading code now lives in the router's
    ``load_local_price_frame``.
    """
    try:
        df = self._router.load_local_price_frame(symbol)
        return df if not df.empty else None
    except Exception as e:
        logger.warning(f"Failed to load CSV for {symbol}: {e}")
        return None

View File

@@ -9,6 +9,7 @@ import random
import threading
import time
from typing import Callable, Dict, List, Optional
from backend.data.provider_utils import normalize_symbol
logger = logging.getLogger(__name__)
@@ -69,6 +70,7 @@ class MockPriceManager:
):
"""Subscribe to stock symbols"""
for symbol in symbols:
symbol = normalize_symbol(symbol)
if symbol not in self.subscribed_symbols:
self.subscribed_symbols.append(symbol)
@@ -90,6 +92,7 @@ class MockPriceManager:
def unsubscribe(self, symbols: List[str]):
    """Unsubscribe from symbols"""
    for entry in symbols:
        sym = normalize_symbol(entry)
        if sym in self.subscribed_symbols:
            self.subscribed_symbols.remove(sym)
            # Forget the mock baseline price for the removed symbol.
            self.base_prices.pop(sym, None)

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
Polling-based Price Manager - Uses Finnhub REST API
Supports real-time price fetching via polling
Polling-based Price Manager with provider-aware quote polling.
Supports Finnhub and yfinance for near real-time price fetching.
"""
import logging
import threading
@@ -9,22 +9,35 @@ import time
from typing import Callable, Dict, List, Optional
import finnhub
import yfinance as yf
from backend.data.provider_utils import normalize_symbol
logger = logging.getLogger(__name__)
class PollingPriceManager:
"""Polling-based price manager using Finnhub Quote API"""
"""Polling-based price manager using Finnhub or yfinance."""
def __init__(self, api_key: str, poll_interval: int = 30):
def __init__(
self,
api_key: Optional[str] = None,
poll_interval: int = 30,
provider: str = "finnhub",
):
"""
Args:
api_key: Finnhub API Key
poll_interval: Polling interval in seconds (default 30s)
provider: Quote provider (`finnhub` or `yfinance`)
"""
self.api_key = api_key
self.poll_interval = poll_interval
self.finnhub_client = finnhub.Client(api_key=api_key)
self.provider = provider
self.finnhub_client = (
finnhub.Client(api_key=api_key)
if provider == "finnhub" and api_key
else None
)
self.subscribed_symbols: List[str] = []
self.latest_prices: Dict[str, float] = {}
@@ -35,12 +48,14 @@ class PollingPriceManager:
self._thread: Optional[threading.Thread] = None
logger.info(
f"PollingPriceManager initialized (interval: {poll_interval}s)",
"PollingPriceManager initialized "
f"(provider: {provider}, interval: {poll_interval}s)",
)
def subscribe(self, symbols: List[str]):
    """Subscribe to stock symbols"""
    for entry in symbols:
        symbol = normalize_symbol(entry)
        if symbol in self.subscribed_symbols:
            continue
        self.subscribed_symbols.append(symbol)
        logger.info(f"Subscribed to: {symbol}")
@@ -48,6 +63,7 @@ class PollingPriceManager:
def unsubscribe(self, symbols: List[str]):
    """Unsubscribe from symbols"""
    for entry in symbols:
        symbol = normalize_symbol(entry)
        if symbol not in self.subscribed_symbols:
            continue
        self.subscribed_symbols.remove(symbol)
        logger.info(f"Unsubscribed: {symbol}")
@@ -60,7 +76,7 @@ class PollingPriceManager:
"""Fetch latest prices for all subscribed stocks"""
for symbol in self.subscribed_symbols:
try:
quote_data = self.finnhub_client.quote(symbol)
quote_data = self._fetch_quote(symbol)
current_price = quote_data.get("c")
open_price = quote_data.get("o")
@@ -114,6 +130,67 @@ class PollingPriceManager:
except Exception as e:
logger.error(f"Failed to fetch {symbol} price: {e}")
def _fetch_quote(self, symbol: str) -> Dict[str, float]:
    """Fetch a normalized quote payload from the configured provider."""
    if self.provider == "yfinance":
        return self._fetch_yfinance_quote(symbol)
    # Finnhub path: the client only exists when an API key was supplied.
    if self.finnhub_client is None:
        raise ValueError("Finnhub API key required for finnhub polling")
    return self.finnhub_client.quote(symbol)
def _fetch_yfinance_quote(self, symbol: str) -> Dict[str, float]:
    """Fetch quote data from yfinance and normalize to Finnhub-like keys."""
    ticker = yf.Ticker(symbol)
    # fast_info may be a lazy mapping or None; copy into a plain dict.
    fast_info = dict(getattr(ticker, "fast_info", {}) or {})
    current_price = _coerce_float(
        fast_info.get("lastPrice") or fast_info.get("regularMarketPrice"),
    )
    open_price = _coerce_float(
        fast_info.get("open") or fast_info.get("regularMarketOpen"),
    )
    previous_close = _coerce_float(
        fast_info.get("previousClose")
        or fast_info.get("regularMarketPreviousClose"),
    )
    high_price = _coerce_float(
        fast_info.get("dayHigh") or fast_info.get("regularMarketDayHigh"),
    )
    low_price = _coerce_float(
        fast_info.get("dayLow") or fast_info.get("regularMarketDayLow"),
    )
    if current_price is None:
        # Fall back to 1-minute intraday history when fast_info has no price.
        history = ticker.history(period="1d", interval="1m", auto_adjust=False)
        if history.empty:
            raise ValueError(f"{symbol}: No yfinance quote data")
        latest = history.iloc[-1]
        current_price = _coerce_float(latest.get("Close"))
        open_price = open_price or _coerce_float(history.iloc[0].get("Open"))
        high_price = high_price or _coerce_float(history["High"].max())
        low_price = low_price or _coerce_float(history["Low"].min())
    if current_price is None:
        raise ValueError(f"{symbol}: Invalid yfinance quote data")
    # NOTE(review): the `or` fallbacks treat a literal 0.0 as "missing" —
    # assumed acceptable for equity prices; confirm if 0-priced data matters.
    effective_open = open_price or previous_close or current_price
    effective_prev_close = previous_close or effective_open or current_price
    change = current_price - effective_prev_close
    change_percent = (
        (change / effective_prev_close) * 100 if effective_prev_close else 0.0
    )
    # Keys mirror Finnhub's quote schema: c/o/h/l/pc/d/dp/t.
    return {
        "c": current_price,
        "o": effective_open,
        "h": high_price or max(current_price, effective_open),
        "l": low_price or min(current_price, effective_open),
        "pc": effective_prev_close,
        "d": change,
        "dp": change_percent,
        "t": int(time.time()),
    }
def _polling_loop(self):
"""Main polling loop"""
logger.info(f"Price polling started (interval: {self.poll_interval}s)")
@@ -173,3 +250,12 @@ class PollingPriceManager:
"""Reset open prices for new trading day"""
self.open_prices.clear()
logger.info("Open prices reset")
def _coerce_float(value) -> Optional[float]:
    """Best-effort float conversion; None for missing/unconvertible values."""
    if value is None:
        return None
    try:
        return float(value)
    except (TypeError, ValueError):
        return None

View File

@@ -0,0 +1,870 @@
# -*- coding: utf-8 -*-
"""Unified data provider router with fallback support."""
import datetime
import logging
from pathlib import Path
from typing import Callable, Optional
import finnhub
import pandas as pd
import yfinance as yf
from backend.config.data_config import DataSource, get_data_sources
from backend.data.schema import (
CompanyFactsResponse,
CompanyNews,
CompanyNewsResponse,
FinancialMetrics,
FinancialMetricsResponse,
InsiderTrade,
InsiderTradeResponse,
LineItem,
LineItemResponse,
Price,
PriceResponse,
)
logger = logging.getLogger(__name__)
_DATA_DIR = Path(__file__).parent / "ret_data"
class DataProviderRouter:
"""Route data requests across configured providers with fallbacks."""
def __init__(self):
    """Capture the configured source ordering and reset usage tracking."""
    self.sources = get_data_sources()
    # Usage metadata surfaced via get_usage_snapshot(): the preferred
    # ordering plus the last provider that succeeded per data type.
    self._usage = {"preferred": list(self.sources), "last_success": {}}
    self._listeners: list[Callable[[dict], None]] = []
def price_sources(self) -> list[DataSource]:
    """Price lookup order, always allowing local CSV fallback.

    NOTE(review): returns the live list (no defensive copy); callers
    must not mutate it.
    """
    return self.sources
def api_sources(self) -> list[DataSource]:
    """Providers that can serve network-backed data (local CSV excluded)."""
    return [name for name in self.sources if name != "local_csv"]
def get_prices(
    self,
    ticker: str,
    start_date: str,
    end_date: str,
) -> tuple[list[Price], DataSource]:
    """Fetch prices using preferred providers with fallback.

    Tries each source in price_sources() order; the first success is
    recorded via _record_success and returned with the serving source.
    If every source raises, the last exception is re-raised; if none
    raises but none yields data, returns ([], "local_csv").
    """
    last_error: Optional[Exception] = None
    for source in self.price_sources():
        try:
            if source == "finnhub":
                prices = _fetch_finnhub_prices(ticker, start_date, end_date)
                self._record_success("prices", source)
                return prices, source
            if source == "financial_datasets":
                prices = _fetch_fd_prices(ticker, start_date, end_date)
                self._record_success("prices", source)
                return prices, source
            if source == "yfinance":
                prices = _fetch_yfinance_prices(ticker, start_date, end_date)
                self._record_success("prices", source)
                return prices, source
            # local_csv: only counts as success when rows were found, so
            # the loop may continue to later sources otherwise.
            prices = _fetch_local_prices(ticker, start_date, end_date)
            if prices:
                self._record_success("prices", source)
                return prices, source
        except Exception as exc:
            last_error = exc
            logger.warning("Price source %s failed for %s: %s", source, ticker, exc)
    if last_error:
        raise last_error
    return [], "local_csv"
def get_financial_metrics(
    self,
    ticker: str,
    end_date: str,
    period: str = "ttm",
    limit: int = 10,
) -> tuple[list[FinancialMetrics], DataSource]:
    """Fetch financial metrics with API provider fallback.

    Iterates api_sources() (local_csv cannot serve metrics); returns the
    first successful result together with its source. Re-raises the last
    error when all sources fail; ([], "local_csv") if no API source is
    configured. Note *limit* is only honored by financial_datasets.
    """
    last_error: Optional[Exception] = None
    for source in self.api_sources():
        try:
            if source == "finnhub":
                metrics = _fetch_finnhub_financial_metrics(
                    ticker,
                    end_date,
                    period,
                )
                self._record_success("financial_metrics", source)
                return metrics, source
            if source == "yfinance":
                metrics = _fetch_yfinance_financial_metrics(
                    ticker,
                    end_date,
                    period,
                )
                self._record_success("financial_metrics", source)
                return metrics, source
            # Remaining API source: financial_datasets.
            metrics = _fetch_fd_financial_metrics(
                ticker,
                end_date,
                period,
                limit,
            )
            self._record_success("financial_metrics", source)
            return metrics, source
        except Exception as exc:
            last_error = exc
            logger.warning(
                "Financial metrics source %s failed for %s: %s",
                source,
                ticker,
                exc,
            )
    if last_error:
        raise last_error
    return [], "local_csv"
def search_line_items(
    self,
    ticker: str,
    line_items: list[str],
    end_date: str,
    period: str = "ttm",
    limit: int = 10,
) -> list[LineItem]:
    """Line items are only supported via Financial Datasets.

    Best-effort: returns [] when that provider is not configured or the
    request fails (errors are logged, never raised).
    """
    if "financial_datasets" not in self.api_sources():
        return []
    try:
        results = _fetch_fd_line_items(
            ticker=ticker,
            line_items=line_items,
            end_date=end_date,
            period=period,
            limit=limit,
        )
        self._record_success("line_items", "financial_datasets")
        return results
    except Exception as exc:
        logger.warning("Line items source failed for %s: %s", ticker, exc)
        return []
def get_insider_trades(
    self,
    ticker: str,
    end_date: str,
    start_date: Optional[str] = None,
    limit: int = 1000,
) -> tuple[list[InsiderTrade], DataSource]:
    """Fetch insider trades with provider fallback.

    Only finnhub and financial_datasets can serve insider data; yfinance
    is skipped explicitly. (Previously source=="yfinance" fell through to
    the financial_datasets fetcher and the result was mislabeled as
    served by yfinance.) Re-raises the last error when every usable
    source fails; returns ([], "local_csv") when none is configured.
    """
    last_error: Optional[Exception] = None
    for source in self.api_sources():
        try:
            if source == "finnhub":
                trades = _fetch_finnhub_insider_trades(
                    ticker,
                    start_date,
                    end_date,
                    limit,
                )
                self._record_success("insider_trades", source)
                return trades, source
            if source == "financial_datasets":
                trades = _fetch_fd_insider_trades(
                    ticker,
                    start_date,
                    end_date,
                    limit,
                )
                self._record_success("insider_trades", source)
                return trades, source
            # yfinance (and any future source) has no insider-trade
            # fetcher wired up; skip instead of misrouting the request.
        except Exception as exc:
            last_error = exc
            logger.warning(
                "Insider trades source %s failed for %s: %s",
                source,
                ticker,
                exc,
            )
    if last_error:
        raise last_error
    return [], "local_csv"
def get_company_news(
    self,
    ticker: str,
    end_date: str,
    start_date: Optional[str] = None,
    limit: int = 1000,
) -> tuple[list[CompanyNews], DataSource]:
    """Fetch company news with provider fallback.

    Tries api_sources() in order (finnhub, yfinance, or
    financial_datasets); re-raises the last error when every configured
    source fails, and returns ([], "local_csv") when none is configured.
    """
    last_error: Optional[Exception] = None
    for source in self.api_sources():
        try:
            if source == "finnhub":
                news = _fetch_finnhub_company_news(
                    ticker,
                    start_date,
                    end_date,
                    limit,
                )
                self._record_success("company_news", source)
                return news, source
            if source == "yfinance":
                news = _fetch_yfinance_company_news(
                    ticker,
                    start_date,
                    end_date,
                    limit,
                )
                self._record_success("company_news", source)
                return news, source
            # Remaining API source: financial_datasets.
            news = _fetch_fd_company_news(
                ticker,
                start_date,
                end_date,
                limit,
            )
            self._record_success("company_news", source)
            return news, source
        except Exception as exc:
            last_error = exc
            logger.warning(
                "Company news source %s failed for %s: %s",
                source,
                ticker,
                exc,
            )
    if last_error:
        raise last_error
    return [], "local_csv"
def get_market_cap(
    self,
    ticker: str,
    end_date: str,
    metrics_lookup,
) -> tuple[Optional[float], DataSource]:
    """Fetch market cap using facts API or financial metrics fallback.

    For a same-day request with Financial Datasets configured, the facts
    endpoint is tried first; otherwise (or on failure) market cap is
    derived from *metrics_lookup*, a callable ``(ticker, end_date) ->
    (metrics, source)``. Finnhub reports market cap in millions, so that
    value is rescaled to absolute units.
    """
    today = datetime.datetime.now().strftime("%Y-%m-%d")
    if end_date == today and "financial_datasets" in self.api_sources():
        try:
            # Fetch BEFORE recording success: the previous version called
            # _record_success first, so a failed facts call was still
            # recorded as a success in the usage snapshot.
            market_cap = _fetch_fd_market_cap_today(ticker)
            self._record_success("market_cap", "financial_datasets")
            return market_cap, "financial_datasets"
        except Exception as exc:
            logger.warning(
                "Market cap facts source failed for %s: %s",
                ticker,
                exc,
            )
    metrics, source = metrics_lookup(ticker, end_date)
    if not metrics:
        return None, source
    market_cap = metrics[0].market_cap
    if market_cap is None:
        return None, source
    self._record_success("market_cap", source)
    if source == "finnhub":
        # Finnhub's marketCapitalization metric is denominated in millions.
        return market_cap * 1_000_000, source
    return market_cap, source
def get_usage_snapshot(self) -> dict:
    """Return provider usage metadata for UI/debugging.

    Both values are shallow copies, so callers cannot mutate router state.
    """
    preferred = list(self._usage["preferred"])
    last_success = dict(self._usage["last_success"])
    return {"preferred": preferred, "last_success": last_success}
def add_listener(self, listener: Callable[[dict], None]) -> None:
    """Register a callback for provider usage changes (idempotent)."""
    if listener in self._listeners:
        return
    self._listeners.append(listener)
def remove_listener(self, listener: Callable[[dict], None]) -> None:
    """Remove a previously registered listener; no-op when absent."""
    if listener not in self._listeners:
        return
    self._listeners.remove(listener)
def load_local_price_frame(
    self,
    ticker: str,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
) -> pd.DataFrame:
    """Load local CSV prices as a DataFrame for backtest managers.

    Reads ``ret_data/<ticker>.csv`` (which must contain a ``time``
    column), optionally clips to [start_date, end_date] inclusive, and
    returns a frame indexed by the parsed ``Date``. An empty frame
    signals "no data".
    """
    csv_path = _DATA_DIR / f"{ticker}.csv"
    if not csv_path.exists():
        return pd.DataFrame()
    df = pd.read_csv(csv_path)
    if df.empty or "time" not in df.columns:
        return pd.DataFrame()
    df["time"] = pd.to_datetime(df["time"])
    if start_date:
        df = df[df["time"] >= pd.to_datetime(start_date)]
    if end_date:
        df = df[df["time"] <= pd.to_datetime(end_date)]
    if df.empty:
        return pd.DataFrame()
    # Copy before assigning: the filters above produce a view, and writing
    # a new column to it triggers pandas' chained-assignment warning.
    df = df.copy()
    # "time" is already datetime64 here; no need to re-parse it.
    df["Date"] = df["time"]
    df.set_index("Date", inplace=True)
    df.sort_index(inplace=True)
    self._record_success("historical_prices", "local_csv")
    return df
def _record_success(self, data_type: str, source: DataSource) -> None:
    """Record the serving source for *data_type*; notify listeners on change."""
    previous = self._usage["last_success"].get(data_type)
    self._usage["last_success"][data_type] = source
    if previous == source:
        return
    snapshot = self.get_usage_snapshot()
    # Iterate over a copy so a listener may unregister itself safely.
    for listener in list(self._listeners):
        try:
            listener(snapshot)
        except Exception as exc:
            # A failing listener must never break data fetching.
            logger.warning("Provider listener failed: %s", exc)
_router_instance: Optional[DataProviderRouter] = None
def get_provider_router() -> DataProviderRouter:
    """Return the lazily created, process-wide router singleton."""
    global _router_instance
    if _router_instance is not None:
        return _router_instance
    _router_instance = DataProviderRouter()
    return _router_instance
def _get_finnhub_client() -> finnhub.Client:
    """Build a Finnhub client; raises ValueError if FINNHUB_API_KEY is unset."""
    return finnhub.Client(api_key=_env_required("FINNHUB_API_KEY"))
def _env_required(key: str) -> str:
    """Return the trimmed env value or raise when it is missing/blank."""
    import os

    value = os.getenv(key, "").strip()
    if value:
        return value
    raise ValueError(f"Missing required API key: {key}")
def _make_api_request(url: str, headers: dict, method: str = "GET", json_data: dict = None):
    """Issue a GET/POST request and raise ValueError on any non-200 reply."""
    import requests

    if method.upper() == "POST":
        response = requests.post(url, headers=headers, json=json_data)
    else:
        response = requests.get(url, headers=headers)
    if response.status_code == 200:
        return response
    raise ValueError(f"{response.status_code} - {response.text}")
def _fetch_local_prices(
    ticker: str,
    start_date: str,
    end_date: str,
) -> list[Price]:
    """Read ret_data/<ticker>.csv and return Price rows within the window."""
    csv_path = _DATA_DIR / f"{ticker}.csv"
    if not csv_path.exists():
        return []
    frame = pd.read_csv(csv_path)
    if frame.empty or "time" not in frame.columns:
        return []
    frame["time"] = pd.to_datetime(frame["time"])
    # Inclusive [start_date, end_date] window.
    window = frame[
        (frame["time"] >= pd.to_datetime(start_date))
        & (frame["time"] <= pd.to_datetime(end_date))
    ].copy()
    if window.empty:
        return []
    prices: list[Price] = []
    for _, row in window.iterrows():
        prices.append(
            Price(
                open=float(row["open"]),
                close=float(row["close"]),
                high=float(row["high"]),
                low=float(row["low"]),
                volume=int(float(row["volume"])),
                time=row["time"].strftime("%Y-%m-%d"),
            ),
        )
    return prices
def _fetch_finnhub_prices(
    ticker: str,
    start_date: str,
    end_date: str,
) -> list[Price]:
    """Fetch daily candles from Finnhub and convert them to Price rows."""
    client = _get_finnhub_client()
    start_timestamp = int(
        datetime.datetime.strptime(start_date, "%Y-%m-%d").timestamp(),
    )
    # Extend by one day so candles on end_date itself are included.
    end_timestamp = int(
        (
            datetime.datetime.strptime(end_date, "%Y-%m-%d")
            + datetime.timedelta(days=1)
        ).timestamp(),
    )
    candles = client.stock_candles(ticker, "D", start_timestamp, end_timestamp)
    # Finnhub returns parallel arrays keyed o/c/h/l/v/t.
    return [
        Price(
            open=candles["o"][i],
            close=candles["c"][i],
            high=candles["h"][i],
            low=candles["l"][i],
            volume=int(candles["v"][i]),
            time=datetime.datetime.fromtimestamp(candles["t"][i]).strftime(
                "%Y-%m-%d",
            ),
        )
        for i in range(len(candles.get("t", [])))
    ]
def _fetch_yfinance_prices(
    ticker: str,
    start_date: str,
    end_date: str,
) -> list[Price]:
    """Fetch daily history from yfinance and convert it to Price rows."""
    history = yf.Ticker(ticker).history(
        start=start_date,
        # yfinance treats `end` as exclusive; extend by one day so the
        # requested end_date is included.
        end=(
            datetime.datetime.strptime(end_date, "%Y-%m-%d")
            + datetime.timedelta(days=1)
        ).strftime("%Y-%m-%d"),
        auto_adjust=False,
        actions=False,
    )
    if history.empty:
        return []
    history = history.reset_index()
    # After reset_index the date column is usually "Date"; fall back to
    # the first column for any other index name yfinance may produce.
    date_column = "Date" if "Date" in history.columns else history.columns[0]
    return [
        Price(
            open=float(row["Open"]),
            close=float(row["Close"]),
            high=float(row["High"]),
            low=float(row["Low"]),
            volume=int(float(row["Volume"])),
            time=pd.to_datetime(row[date_column]).strftime("%Y-%m-%d"),
        )
        for _, row in history.iterrows()
    ]
def _fetch_fd_prices(
    ticker: str,
    start_date: str,
    end_date: str,
) -> list[Price]:
    """Fetch daily prices from the Financial Datasets REST API."""
    api_key = _env_required("FINANCIAL_DATASETS_API_KEY")
    url = (
        "https://api.financialdatasets.ai/prices/"
        f"?ticker={ticker}&interval=day&interval_multiplier=1"
        f"&start_date={start_date}&end_date={end_date}"
    )
    response = _make_api_request(url, {"X-API-KEY": api_key})
    return PriceResponse(**response.json()).prices
def _fetch_finnhub_financial_metrics(
    ticker: str,
    end_date: str,
    period: str,
) -> list[FinancialMetrics]:
    """Fetch Finnhub basic financials and map them to FinancialMetrics.

    Returns a single-element list (Finnhub serves one snapshot), or []
    when the response carries no metric data.
    """
    financials = _get_finnhub_client().company_basic_financials(ticker, "all")
    metric_data = financials.get("metric", {})
    if not metric_data:
        return []
    return [_map_finnhub_metrics(ticker, end_date, period, metric_data)]
def _fetch_fd_financial_metrics(
    ticker: str,
    end_date: str,
    period: str,
    limit: int,
) -> list[FinancialMetrics]:
    """Fetch financial metrics from the Financial Datasets REST API."""
    api_key = _env_required("FINANCIAL_DATASETS_API_KEY")
    url = (
        "https://api.financialdatasets.ai/financial-metrics/"
        f"?ticker={ticker}&report_period_lte={end_date}&limit={limit}&period={period}"
    )
    response = _make_api_request(url, {"X-API-KEY": api_key})
    return FinancialMetricsResponse(**response.json()).financial_metrics
def _fetch_yfinance_financial_metrics(
    ticker: str,
    end_date: str,
    period: str,
) -> list[FinancialMetrics]:
    """Build a single FinancialMetrics snapshot from ``yf.Ticker(...).info``.

    Fields yfinance does not expose are set to None explicitly; FCF yield
    and FCF per share are derived via _ratio_or_none. The snapshot is
    stamped with the caller's end_date/period, not a real report period.
    """
    info = yf.Ticker(ticker).info or {}
    shares_outstanding = _coerce_float(info.get("sharesOutstanding"))
    free_cashflow = _coerce_float(info.get("freeCashflow"))
    return [
        FinancialMetrics(
            ticker=ticker,
            report_period=end_date,
            period=period,
            currency=str(info.get("currency") or "USD"),
            market_cap=_coerce_float(info.get("marketCap")),
            enterprise_value=_coerce_float(info.get("enterpriseValue")),
            price_to_earnings_ratio=_coerce_float(info.get("trailingPE")),
            price_to_book_ratio=_coerce_float(info.get("priceToBook")),
            price_to_sales_ratio=_coerce_float(
                info.get("priceToSalesTrailing12Months"),
            ),
            enterprise_value_to_ebitda_ratio=_coerce_float(
                info.get("enterpriseToEbitda"),
            ),
            enterprise_value_to_revenue_ratio=_coerce_float(
                info.get("enterpriseToRevenue"),
            ),
            free_cash_flow_yield=_ratio_or_none(free_cashflow, info.get("marketCap")),
            peg_ratio=_coerce_float(info.get("pegRatio")),
            gross_margin=_coerce_float(info.get("grossMargins")),
            operating_margin=_coerce_float(info.get("operatingMargins")),
            net_margin=_coerce_float(info.get("profitMargins")),
            return_on_equity=_coerce_float(info.get("returnOnEquity")),
            return_on_assets=_coerce_float(info.get("returnOnAssets")),
            return_on_invested_capital=None,
            asset_turnover=None,
            inventory_turnover=None,
            receivables_turnover=None,
            days_sales_outstanding=None,
            operating_cycle=None,
            working_capital_turnover=None,
            current_ratio=_coerce_float(info.get("currentRatio")),
            quick_ratio=_coerce_float(info.get("quickRatio")),
            cash_ratio=None,
            operating_cash_flow_ratio=None,
            debt_to_equity=_coerce_float(info.get("debtToEquity")),
            debt_to_assets=None,
            interest_coverage=None,
            revenue_growth=_coerce_float(info.get("revenueGrowth")),
            earnings_growth=_coerce_float(
                info.get("earningsGrowth") or info.get("earningsQuarterlyGrowth"),
            ),
            book_value_growth=None,
            earnings_per_share_growth=_coerce_float(
                info.get("earningsQuarterlyGrowth"),
            ),
            free_cash_flow_growth=None,
            operating_income_growth=None,
            ebitda_growth=None,
            payout_ratio=_coerce_float(info.get("payoutRatio")),
            earnings_per_share=_coerce_float(info.get("trailingEps")),
            book_value_per_share=_coerce_float(info.get("bookValue")),
            free_cash_flow_per_share=_ratio_or_none(free_cashflow, shares_outstanding),
        ),
    ]
def _fetch_fd_line_items(
    ticker: str,
    line_items: list[str],
    end_date: str,
    period: str,
    limit: int,
) -> list[LineItem]:
    """POST a line-item search to Financial Datasets, capped at *limit*."""
    api_key = _env_required("FINANCIAL_DATASETS_API_KEY")
    payload = {
        "tickers": [ticker],
        "line_items": line_items,
        "end_date": end_date,
        "period": period,
        "limit": limit,
    }
    response = _make_api_request(
        "https://api.financialdatasets.ai/financials/search/line-items",
        {"X-API-KEY": api_key},
        method="POST",
        json_data=payload,
    )
    return LineItemResponse(**response.json()).search_results[:limit]
def _fetch_finnhub_insider_trades(
    ticker: str,
    start_date: Optional[str],
    end_date: str,
    limit: int,
) -> list[InsiderTrade]:
    """Fetch insider transactions from Finnhub within [start_date, end_date].

    When start_date is None, the window defaults to the 365 days before
    end_date. Results are truncated to *limit* before conversion.
    """
    client = _get_finnhub_client()
    from_date = start_date or (
        datetime.datetime.strptime(end_date, "%Y-%m-%d")
        - datetime.timedelta(days=365)
    ).strftime("%Y-%m-%d")
    insider_data = client.stock_insider_transactions(ticker, from_date, end_date)
    return [
        _convert_finnhub_insider_trade(ticker, trade)
        for trade in insider_data.get("data", [])[:limit]
    ]
def _fetch_yfinance_company_news(
    ticker: str,
    start_date: Optional[str],
    end_date: str,
    limit: int,
) -> list[CompanyNews]:
    """Convert Yahoo Finance news items to CompanyNews within a date window.

    Handles both the newer payload shape (fields nested under "content")
    and the legacy flat shape. Items missing a title or URL are skipped;
    undated items are kept. At most *limit* results are returned.
    """
    news_items = getattr(yf.Ticker(ticker), "news", None) or []
    start_bound = _normalize_timestamp(pd.to_datetime(start_date)) if start_date else None
    end_bound = _normalize_timestamp(pd.to_datetime(end_date))
    results: list[CompanyNews] = []
    for item in news_items:
        # Newer yfinance nests fields under "content"; older items are flat.
        content = item.get("content", item)
        published = (
            content.get("pubDate")
            or content.get("displayTime")
            or item.get("providerPublishTime")
        )
        published_dt = _normalize_timestamp(_parse_news_datetime(published))
        # Filter to [start_bound, end_bound]; items without a parseable
        # date fall through and are retained.
        if published_dt is not None and published_dt > end_bound:
            continue
        if start_bound is not None and published_dt is not None and published_dt < start_bound:
            continue
        url = (
            _nested_get(content, "canonicalUrl", "url")
            or content.get("clickThroughUrl")
            or content.get("url")
            or item.get("link")
        )
        title = content.get("title") or item.get("title")
        if not title or not url:
            continue
        results.append(
            CompanyNews(
                category=content.get("contentType") or item.get("type"),
                ticker=ticker,
                title=title,
                related=item.get("relatedTickers", [ticker])[0]
                if item.get("relatedTickers")
                else ticker,
                source=_nested_get(content, "provider", "displayName")
                or item.get("publisher")
                or "Yahoo Finance",
                date=published_dt.strftime("%Y-%m-%d") if published_dt else None,
                url=url,
                summary=content.get("summary") or item.get("summary"),
            ),
        )
        if len(results) >= limit:
            break
    return results
def _map_finnhub_metrics(
    ticker: str,
    end_date: str,
    period: str,
    metric_data: dict,
) -> FinancialMetrics:
    """Map a raw Finnhub metrics payload onto the FinancialMetrics model.

    Fields Finnhub does not provide are filled with None.
    """
    metric = metric_data.get
    return FinancialMetrics(
        ticker=ticker,
        report_period=end_date,
        period=period,
        currency="USD",
        market_cap=metric("marketCapitalization"),
        enterprise_value=None,
        price_to_earnings_ratio=metric("peBasicExclExtraTTM"),
        price_to_book_ratio=metric("pbAnnual"),
        price_to_sales_ratio=metric("psAnnual"),
        enterprise_value_to_ebitda_ratio=None,
        enterprise_value_to_revenue_ratio=None,
        free_cash_flow_yield=None,
        peg_ratio=None,
        gross_margin=metric("grossMarginTTM"),
        operating_margin=metric("operatingMarginTTM"),
        net_margin=metric("netProfitMarginTTM"),
        return_on_equity=metric("roeTTM"),
        return_on_assets=metric("roaTTM"),
        return_on_invested_capital=metric("roicTTM"),
        asset_turnover=metric("assetTurnoverTTM"),
        inventory_turnover=metric("inventoryTurnoverTTM"),
        receivables_turnover=metric("receivablesTurnoverTTM"),
        days_sales_outstanding=None,
        operating_cycle=None,
        working_capital_turnover=None,
        current_ratio=metric("currentRatioAnnual"),
        quick_ratio=metric("quickRatioAnnual"),
        cash_ratio=None,
        operating_cash_flow_ratio=None,
        debt_to_equity=metric("totalDebt/totalEquityAnnual"),
        debt_to_assets=None,
        interest_coverage=None,
        revenue_growth=metric("revenueGrowthTTMYoy"),
        earnings_growth=None,
        book_value_growth=None,
        earnings_per_share_growth=metric("epsGrowthTTMYoy"),
        free_cash_flow_growth=None,
        operating_income_growth=None,
        ebitda_growth=None,
        payout_ratio=metric("payoutRatioAnnual"),
        earnings_per_share=metric("epsBasicExclExtraItemsTTM"),
        book_value_per_share=metric("bookValuePerShareAnnual"),
        free_cash_flow_per_share=None,
    )
def _coerce_float(value) -> Optional[float]:
try:
if value is None:
return None
return float(value)
except (TypeError, ValueError):
return None
def _ratio_or_none(numerator, denominator) -> Optional[float]:
    """Return numerator / denominator as float, or None if either side is unusable."""
    num = _coerce_float(numerator)
    den = _coerce_float(denominator)
    if num is None or den is None or den == 0.0:
        return None
    return num / den
def _nested_get(payload: dict, *keys: str):
current = payload
for key in keys:
if not isinstance(current, dict):
return None
current = current.get(key)
return current
def _parse_news_datetime(value) -> Optional[pd.Timestamp]:
if value is None:
return None
try:
if isinstance(value, (int, float)):
return pd.to_datetime(int(value), unit="s")
return pd.to_datetime(value)
except (TypeError, ValueError):
return None
def _normalize_timestamp(value: Optional[pd.Timestamp]) -> Optional[pd.Timestamp]:
if value is None:
return None
if value.tzinfo is not None:
return value.tz_convert(None)
return value
def _convert_finnhub_insider_trade(ticker: str, trade: dict) -> InsiderTrade:
    """Convert Finnhub insider trade format to InsiderTrade model.

    Finnhub records can carry explicit nulls for numeric fields, so every
    value is defaulted before arithmetic to avoid TypeErrors on sparse rows
    (e.g. abs(None) or None * price).
    """
    shares_after = trade.get("share") or 0
    change = trade.get("change") or 0
    price = trade.get("transactionPrice") or 0.0
    return InsiderTrade(
        ticker=ticker,
        issuer=None,
        name=trade.get("name") or "",
        title=None,
        is_board_director=None,
        transaction_date=trade.get("transactionDate") or "",
        transaction_shares=abs(change),
        transaction_price_per_share=price,
        transaction_value=abs(change) * price,
        # Only derivable when both post-trade shares and the delta are known.
        shares_owned_before_transaction=(
            shares_after - change if shares_after and change else None
        ),
        shares_owned_after_transaction=float(shares_after)
        if shares_after
        else None,
        security_title=None,
        filing_date=trade.get("filingDate") or "",
    )
def _fetch_fd_insider_trades(
    ticker: str,
    start_date: Optional[str],
    end_date: str,
    limit: int,
) -> list[InsiderTrade]:
    """Fetch insider trades from the Financial Datasets API."""
    headers = {"X-API-KEY": _env_required("FINANCIAL_DATASETS_API_KEY")}
    parts = [
        f"https://api.financialdatasets.ai/insider-trades/?ticker={ticker}&filing_date_lte={end_date}",
    ]
    if start_date:
        parts.append(f"filing_date_gte={start_date}")
    parts.append(f"limit={limit}")
    response = _make_api_request("&".join(parts), headers)
    return InsiderTradeResponse(**response.json()).insider_trades
def _fetch_finnhub_company_news(
    ticker: str,
    start_date: Optional[str],
    end_date: str,
    limit: int,
) -> list[CompanyNews]:
    """Fetch company news from Finnhub, defaulting to a 30-day lookback."""
    if start_date:
        from_date = start_date
    else:
        end_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d")
        from_date = (end_dt - datetime.timedelta(days=30)).strftime("%Y-%m-%d")
    raw_news = _get_finnhub_client().company_news(
        ticker,
        _from=from_date,
        to=end_date,
    )
    articles: list[CompanyNews] = []
    for entry in raw_news[:limit]:
        timestamp = entry.get("datetime")
        published = None
        if timestamp:
            published = datetime.datetime.fromtimestamp(
                timestamp,
                datetime.timezone.utc,
            ).strftime("%Y-%m-%d")
        articles.append(
            CompanyNews(
                ticker=ticker,
                title=entry.get("headline", ""),
                related=entry.get("related", ""),
                source=entry.get("source", ""),
                date=published,
                url=entry.get("url", ""),
                summary=entry.get("summary", ""),
                category=entry.get("category", ""),
            ),
        )
    return articles
def _fetch_fd_company_news(
    ticker: str,
    start_date: Optional[str],
    end_date: str,
    limit: int,
) -> list[CompanyNews]:
    """Fetch company news from the Financial Datasets API."""
    headers = {"X-API-KEY": _env_required("FINANCIAL_DATASETS_API_KEY")}
    url = (
        "https://api.financialdatasets.ai/news/"
        f"?ticker={ticker}&end_date={end_date}&limit={limit}"
    )
    if start_date:
        url += f"&start_date={start_date}"
    response = _make_api_request(url, headers)
    return CompanyNewsResponse(**response.json()).news
def _fetch_fd_market_cap_today(ticker: str) -> Optional[float]:
    """Fetch the latest market cap for ticker from the Financial Datasets API."""
    headers = {"X-API-KEY": _env_required("FINANCIAL_DATASETS_API_KEY")}
    response = _make_api_request(
        f"https://api.financialdatasets.ai/company/facts/?ticker={ticker}",
        headers,
    )
    facts = CompanyFactsResponse(**response.json()).company_facts
    return facts.market_cap

View File

@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
"""Shared market symbol normalization helpers."""
from dataclasses import dataclass
@dataclass(frozen=True)
class MarketSymbol:
    """Normalized symbol metadata produced by describe_symbol()."""

    # Symbol exactly as supplied by the caller (may be lowercase or prefixed).
    raw: str
    # Canonical form after normalize_symbol() (uppercase, exchange affixes stripped).
    canonical: str
    # Market tag inferred by detect_market(): "us", "hk", or "cn".
    market: str
def canonical_symbol(symbol: str) -> str:
    """Return canonical uppercase symbol for storage and routing."""
    text = symbol or ""
    return text.strip().upper()
def normalize_symbol(symbol: str) -> str:
    """
    Normalize symbols across US and exchange-prefixed formats.

    Examples:
        - sh600519 -> 600519
        - 600519.SH -> 600519
        - aapl -> AAPL
        - hk00700 -> HK00700
    """
    canonical = canonical_symbol(symbol)
    prefix, body = canonical[:2], canonical[2:]
    if prefix in {"SH", "SZ", "BJ"} and body.isdigit() and len(body) in (5, 6):
        return body
    if "." in canonical:
        base, _, suffix = canonical.rpartition(".")
        if suffix in {"SH", "SZ", "SS", "BJ"} and base.isdigit():
            return base
    return canonical
def detect_market(symbol: str) -> str:
    """Infer market tag from normalized symbol."""
    normalized = normalize_symbol(symbol)
    five_digit = normalized.isdigit() and len(normalized) == 5
    if normalized.startswith("HK") or five_digit:
        return "hk"
    looks_us = normalized.isalpha() or (
        "/" not in normalized and not normalized.isdigit()
    )
    return "us" if looks_us else "cn"
def describe_symbol(symbol: str) -> MarketSymbol:
    """Return normalized symbol metadata."""
    canonical = normalize_symbol(symbol)
    market = detect_market(canonical)
    return MarketSymbol(raw=symbol, canonical=canonical, market=market)

View File

@@ -3,9 +3,9 @@
AgentScope Native Model Factory
Uses native AgentScope model classes for LLM calls
"""
import os
from enum import Enum
from typing import Optional, Tuple
import os
from agentscope.formatter import (
AnthropicChatFormatter,
DashScopeChatFormatter,
@@ -20,6 +20,11 @@ from agentscope.model import (
OllamaChatModel,
OpenAIChatModel,
)
from backend.config.env_config import (
canonicalize_model_provider,
get_agent_model_config,
get_env_str,
)
class ModelProvider(Enum):
@@ -108,7 +113,7 @@ def create_model(
Returns:
AgentScope model instance
"""
provider = provider.upper()
provider = canonicalize_model_provider(provider)
model_class = PROVIDER_MODEL_MAP.get(provider)
if model_class is None:
@@ -138,19 +143,21 @@ def create_model(
# Handle custom OpenAI base URL
if provider == "OPENAI":
base_url = os.getenv("OPENAI_BASE_URL") or os.getenv("OPENAI_API_BASE")
base_url = get_env_str("OPENAI_BASE_URL") or get_env_str(
"OPENAI_API_BASE",
)
if base_url:
model_kwargs["client_args"] = {"base_url": base_url}
# Handle DashScope base URL (uses different parameter)
if provider in ("DASHSCOPE", "ALIBABA"):
base_url = os.getenv("DASHSCOPE_BASE_URL")
base_url = get_env_str("DASHSCOPE_BASE_URL")
if base_url:
model_kwargs["base_http_api_url"] = base_url
# Handle Ollama host
if provider == "OLLAMA":
host = os.getenv("OLLAMA_HOST")
host = get_env_str("OLLAMA_HOST")
if host:
model_kwargs["host"] = host
@@ -174,23 +181,11 @@ def get_agent_model(agent_id: str, stream: bool = False):
Returns:
AgentScope model instance
"""
# Normalize agent_id to uppercase for env var lookup
agent_key = agent_id.upper().replace("-", "_")
# Try agent-specific config first
model_name = os.getenv(f"AGENT_{agent_key}_MODEL_NAME")
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
print(f"Using specific model {model_name} for agent {agent_key}")
# Fall back to global config
if not model_name:
model_name = os.getenv("MODEL_NAME", "gpt-4o")
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
resolved = get_agent_model_config(agent_id)
return create_model(
model_name=model_name,
provider=provider,
model_name=resolved.model_name,
provider=resolved.provider,
stream=stream,
)
@@ -205,17 +200,7 @@ def get_agent_formatter(agent_id: str):
Returns:
AgentScope formatter instance
"""
# Normalize agent_id to uppercase for env var lookup
agent_key = agent_id.upper().replace("-", "_")
# Try agent-specific config first
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
# Fall back to global config
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
provider = provider.upper()
provider = get_agent_model_config(agent_id).provider
formatter_class = PROVIDER_FORMATTER_MAP.get(provider, OpenAIChatFormatter)
return formatter_class()
@@ -230,14 +215,5 @@ def get_agent_model_info(agent_id: str) -> Tuple[str, str]:
Returns:
Tuple of (model_name, provider_name)
"""
agent_key = agent_id.upper().replace("-", "_")
model_name = os.getenv(f"AGENT_{agent_key}_MODEL_NAME")
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
if not model_name:
model_name = os.getenv("MODEL_NAME", "gpt-4o")
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
return model_name, provider.upper()
resolved = get_agent_model_config(agent_id)
return resolved.model_name, resolved.provider

View File

@@ -17,6 +17,7 @@ from backend.core.pipeline import TradingPipeline
from backend.core.state_sync import StateSync
from backend.services.market import MarketService
from backend.services.storage import StorageService
from backend.data.provider_router import get_provider_router
logger = logging.getLogger(__name__)
@@ -60,10 +61,14 @@ class Gateway:
# Session tracking for live returns
self._session_start_portfolio_value: Optional[float] = None
self._provider_router = get_provider_router()
self._loop: Optional[asyncio.AbstractEventLoop] = None
async def start(self, host: str = "0.0.0.0", port: int = 8766):
"""Start gateway server"""
logger.info(f"Starting gateway on {host}:{port}")
self._loop = asyncio.get_running_loop()
self._provider_router.add_listener(self._on_provider_usage_changed)
# Initialize terminal dashboard
self._dashboard.set_config(
@@ -77,6 +82,7 @@ class Gateway:
initial_cash=self.storage.initial_cash,
start_date=self._backtest_start_date or "",
end_date=self._backtest_end_date or "",
data_sources=self._provider_router.get_usage_snapshot(),
)
self._dashboard.start()
@@ -88,6 +94,10 @@ class Gateway:
"is_mock_mode",
self.config.get("mock_mode", False),
)
self.state_sync.update_state(
"data_sources",
self._provider_router.get_usage_snapshot(),
)
# Load and display existing portfolio state if available
summary = self.storage.load_file("summary")
@@ -130,6 +140,21 @@ class Gateway:
)
await asyncio.Future()
def _on_provider_usage_changed(self, snapshot: Dict[str, Any]):
"""Handle provider routing updates from the shared router."""
self.state_sync.update_state("data_sources", snapshot)
self._dashboard.update(data_sources=snapshot)
if self._loop and self._loop.is_running():
asyncio.run_coroutine_threadsafe(
self.broadcast(
{
"type": "data_sources_update",
"data_sources": snapshot,
},
),
self._loop,
)
@property
def state(self) -> Dict[str, Any]:
return self.state_sync.state
@@ -149,6 +174,9 @@ class Gateway:
state_payload = self.state_sync.get_initial_state_payload(
include_dashboard=True,
)
state_payload["data_sources"] = (
self._provider_router.get_usage_snapshot()
)
# Include market status in initial state
state_payload[
"market_status"

View File

@@ -10,6 +10,8 @@ from typing import Any, Callable, Dict, List, Optional
from zoneinfo import ZoneInfo
import pandas_market_calendars as mcal
from backend.config.data_config import get_data_source
from backend.data.provider_utils import normalize_symbol
logger = logging.getLogger(__name__)
@@ -40,7 +42,7 @@ class MarketService:
backtest_start_date: Optional[str] = None,
backtest_end_date: Optional[str] = None,
):
self.tickers = tickers
self.tickers = [normalize_symbol(ticker) for ticker in tickers]
self.poll_interval = poll_interval
self.mock_mode = mock_mode
self.backtest_mode = backtest_mode
@@ -123,11 +125,16 @@ class MarketService:
def _start_real_mode(self):
from backend.data.polling_price_manager import PollingPriceManager
if not self.api_key:
provider = get_data_source()
if provider == "local_csv":
provider = "yfinance"
if provider == "finnhub" and not self.api_key:
raise ValueError("API key required for live mode")
self._price_manager = PollingPriceManager(
api_key=self.api_key,
poll_interval=self.poll_interval,
provider=provider,
)
self._price_manager.add_price_callback(self._make_price_callback())
self._price_manager.subscribe(self.tickers)

View File

@@ -25,7 +25,7 @@ class TestAnalystAgent:
)
assert agent.analyst_type_key == "technical_analyst"
assert agent.name == "technical_analyst_analyst"
assert agent.name == "technical_analyst"
assert agent.analyst_persona == "Technical Analyst"
def test_init_invalid_analyst_type(self):

View File

@@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from backend.tools.analysis_tools import _resolved_date
def test_resolved_date_clamps_future_date():
    # A date two days in the future must be clamped back to today.
    two_days_ahead = datetime.today() + timedelta(days=2)
    expected_today = datetime.today().strftime("%Y-%m-%d")
    assert _resolved_date(two_days_ahead.strftime("%Y-%m-%d")) == expected_today

View File

@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
"""Tests for data source config ordering."""
from backend.config.data_config import get_config, reset_config
def test_data_config_prefers_env_source(monkeypatch):
    # FIN_DATA_SOURCE should take priority when both API keys are present.
    monkeypatch.setenv("FINNHUB_API_KEY", "fh")
    monkeypatch.setenv("FINANCIAL_DATASETS_API_KEY", "fd")
    monkeypatch.setenv("FIN_DATA_SOURCE", "financial_datasets")
    reset_config()
    resolved = get_config()
    assert resolved.sources[0] == "financial_datasets"
    assert "local_csv" in resolved.sources
def test_enabled_data_sources_filters_available_sources(monkeypatch):
    # The allowlist should drop finnhub even though its key is set.
    monkeypatch.delenv("FIN_DATA_SOURCE", raising=False)
    monkeypatch.setenv("FINNHUB_API_KEY", "fh-key")
    monkeypatch.setenv("FINANCIAL_DATASETS_API_KEY", "fd-key")
    monkeypatch.setenv("ENABLED_DATA_SOURCES", "financial_datasets,local_csv")
    reset_config()
    resolved = get_config()
    assert resolved.source == "financial_datasets"
    assert resolved.sources == ["financial_datasets", "local_csv"]
def test_preferred_source_reorders_enabled_sources(monkeypatch):
    # The preferred source should move to the head of the enabled list.
    monkeypatch.setenv("FIN_DATA_SOURCE", "finnhub")
    monkeypatch.setenv("ENABLED_DATA_SOURCES", "financial_datasets,finnhub,local_csv")
    monkeypatch.setenv("FINNHUB_API_KEY", "fh-key")
    monkeypatch.setenv("FINANCIAL_DATASETS_API_KEY", "fd-key")
    reset_config()
    resolved = get_config()
    assert resolved.source == "finnhub"
    assert resolved.sources == ["finnhub", "financial_datasets", "local_csv"]
def test_yfinance_can_be_enabled_without_api_key(monkeypatch):
    # yfinance requires no API key, so it should resolve with no keys set.
    for key in ("FINNHUB_API_KEY", "FINANCIAL_DATASETS_API_KEY"):
        monkeypatch.delenv(key, raising=False)
    monkeypatch.setenv("FIN_DATA_SOURCE", "yfinance")
    monkeypatch.setenv("ENABLED_DATA_SOURCES", "yfinance,local_csv")
    reset_config()
    resolved = get_config()
    assert resolved.source == "yfinance"
    assert resolved.sources == ["yfinance", "local_csv"]

View File

@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
"""Tests for normalized env config helpers."""
from backend.config.env_config import (
canonicalize_model_provider,
get_agent_model_config,
)
def test_canonicalize_model_provider_aliases():
    # Each vendor alias should map onto its canonical provider name.
    alias_map = {
        "claude": "ANTHROPIC",
        "openai_compatible": "OPENAI",
        "google": "GEMINI",
    }
    for alias, expected in alias_map.items():
        assert canonicalize_model_provider(alias) == expected
def test_get_agent_model_config_fallback(monkeypatch):
    # Without agent-specific overrides the global model config should apply.
    monkeypatch.setenv("MODEL_NAME", "gpt-4o-mini")
    monkeypatch.setenv("MODEL_PROVIDER", "openai")
    for var in (
        "AGENT_RISK_MANAGER_MODEL_NAME",
        "AGENT_RISK_MANAGER_MODEL_PROVIDER",
    ):
        monkeypatch.delenv(var, raising=False)
    resolved = get_agent_model_config("risk_manager")
    assert (resolved.model_name, resolved.provider) == ("gpt-4o-mini", "OPENAI")

View File

@@ -157,6 +157,15 @@ class TestPollingPriceManager:
assert manager.api_key == "test_key"
assert manager.poll_interval == 30
assert manager.provider == "finnhub"
assert manager.running is False
def test_init_yfinance(self):
manager = PollingPriceManager(provider="yfinance", poll_interval=15)
assert manager.api_key is None
assert manager.poll_interval == 15
assert manager.provider == "yfinance"
assert manager.running is False
def test_subscribe(self):
@@ -182,7 +191,7 @@ class TestPollingPriceManager:
assert callback in manager.price_callbacks
@patch.object(PollingPriceManager, "_fetch_prices")
def test_start_stop(self):
def test_start_stop(self, _mock_fetch_prices):
manager = PollingPriceManager(api_key="test_key", poll_interval=1)
manager.subscribe(["AAPL"])
@@ -246,6 +255,20 @@ class TestMarketService:
assert service.mock_mode is False
assert service.api_key == "test_key"
@patch("backend.services.market.get_data_source", return_value="yfinance")
@patch.object(PollingPriceManager, "start")
def test_start_real_mode_with_yfinance(self, _mock_start, _mock_source):
service = MarketService(
tickers=["AAPL"],
poll_interval=10,
mock_mode=False,
)
service._start_real_mode()
assert isinstance(service._price_manager, PollingPriceManager)
assert service._price_manager.provider == "yfinance"
@pytest.mark.asyncio
async def test_start_mock_mode(self):
service = MarketService(
@@ -264,8 +287,9 @@ class TestMarketService:
service.stop()
@patch("backend.services.market.get_data_source", return_value="finnhub")
@pytest.mark.asyncio
async def test_start_real_mode_without_api_key(self):
async def test_start_real_mode_without_api_key(self, _mock_source):
service = MarketService(
tickers=["AAPL"],
mock_mode=False,

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
"""Tests for provider router fallback behavior."""
from backend.data.provider_router import DataProviderRouter
from backend.config.data_config import reset_config
def test_router_includes_local_csv_fallback(monkeypatch):
    # With no keys or preference configured, only the local CSV source remains.
    for var in ("FINNHUB_API_KEY", "FINANCIAL_DATASETS_API_KEY", "FIN_DATA_SOURCE"):
        monkeypatch.delenv(var, raising=False)
    reset_config()
    assert DataProviderRouter().price_sources() == ["local_csv"]
def test_router_allows_yfinance_when_enabled(monkeypatch):
    # yfinance should serve prices (with CSV fallback) and API lookups.
    for var in ("FINNHUB_API_KEY", "FINANCIAL_DATASETS_API_KEY"):
        monkeypatch.delenv(var, raising=False)
    monkeypatch.setenv("FIN_DATA_SOURCE", "yfinance")
    monkeypatch.setenv("ENABLED_DATA_SOURCES", "yfinance,local_csv")
    reset_config()
    router = DataProviderRouter()
    assert router.price_sources() == ["yfinance", "local_csv"]
    assert router.api_sources() == ["yfinance"]

View File

@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
"""Tests for market symbol normalization helpers."""
from backend.data.provider_utils import describe_symbol, normalize_symbol
def test_normalize_symbol_exchange_prefix():
    # Prefix and suffix exchange markers both normalize to the bare code.
    for raw in ("sh600519", "600519.SH"):
        assert normalize_symbol(raw) == "600519"
def test_normalize_symbol_us_ticker():
    described = describe_symbol("aapl")
    assert (described.canonical, described.market) == ("AAPL", "us")

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
"""Tests for structured technical analyzer."""
import pandas as pd
from backend.tools.technical_signals import StockTechnicalAnalyzer
def test_technical_analyzer_detects_bullish_trend():
    # A strictly rising 40-day close series must read as bullish.
    closes = list(range(100, 140))
    frame = pd.DataFrame(
        {
            "time": pd.date_range("2024-01-01", periods=len(closes), freq="D"),
            "close": closes,
        },
    )
    result = StockTechnicalAnalyzer().analyze("AAPL", frame)
    assert result.current_price == 139.0
    assert result.momentum_20d_pct > 0
    assert result.trend in {"BULLISH", "STRONG BULLISH"}

View File

@@ -29,8 +29,10 @@ from backend.tools.data_tools import (
prices_to_df,
search_line_items,
)
from backend.tools.technical_signals import StockTechnicalAnalyzer
logger = logging.getLogger(__name__)
_technical_analyzer = StockTechnicalAnalyzer()
def _to_text_response(text: str) -> ToolResponse:
@@ -108,7 +110,12 @@ def _fmt(val, fmt=".2f", suffix="") -> str:
def _resolved_date(current_date: Optional[str]) -> str:
"""Ensure we always return a concrete date string."""
return current_date or datetime.today().strftime("%Y-%m-%d")
today = datetime.today().date()
if not current_date:
return today.strftime("%Y-%m-%d")
requested_date = datetime.strptime(current_date, "%Y-%m-%d").date()
return min(requested_date, today).strftime("%Y-%m-%d")
# ==================== Fundamental Analysis Tools ====================
@@ -419,60 +426,33 @@ def analyze_trend_following(
lines.append(f"{ticker}: Insufficient price data\n")
continue
df = prices_to_df(prices)
n = len(df)
# Calculate moving averages
sma_20_win = min(20, n // 2)
sma_50_win = min(50, n - 5) if n > 25 else min(25, n - 5)
sma_200_win = min(200, n - 10) if n > 200 else None
df["SMA_20"] = df["close"].rolling(window=sma_20_win).mean()
df["SMA_50"] = df["close"].rolling(window=sma_50_win).mean()
if sma_200_win:
df["SMA_200"] = df["close"].rolling(window=sma_200_win).mean()
df["EMA_12"] = df["close"].ewm(span=min(12, n // 3)).mean()
df["EMA_26"] = df["close"].ewm(span=min(26, n // 2)).mean()
df["MACD"] = df["EMA_12"] - df["EMA_26"]
df["MACD_signal"] = df["MACD"].ewm(span=9).mean()
current_price = _safe_float(df["close"].iloc[-1])
sma_20 = _safe_float(df["SMA_20"].iloc[-1])
sma_50 = _safe_float(df["SMA_50"].iloc[-1])
sma_200 = (
_safe_float(df["SMA_200"].iloc[-1])
if "SMA_200" in df.columns
signal = _technical_analyzer.analyze(ticker, prices_to_df(prices))
distance_200ma = (
((signal.current_price - signal.ma200) / signal.ma200) * 100
if signal.ma200
else None
)
macd = _safe_float(df["MACD"].iloc[-1])
macd_signal = _safe_float(df["MACD_signal"].iloc[-1])
macd_signal_str = (
"BUY" if signal.macd > signal.macd_signal else "SELL"
)
# Determine trend
if sma_200:
trend = "BULLISH" if current_price > sma_200 else "BEARISH"
distance_200ma = ((current_price - sma_200) / sma_200) * 100
else:
trend = "UNKNOWN"
distance_200ma = None
macd_signal_str = "BUY" if macd > macd_signal else "SELL"
lines.append(f"{ticker}: ${current_price:.2f}")
lines.append(f"{ticker}: ${signal.current_price:.2f}")
lines.append(
f" SMA20: ${sma_20:.2f} | SMA50: ${sma_50:.2f} | SMA200: {f'${sma_200:.2f}' if sma_200 else 'N/A'}",
f" MA20: ${signal.ma20:.2f} | MA50: ${signal.ma50:.2f} | MA200: {f'${signal.ma200:.2f}' if signal.ma200 else 'N/A'}",
)
lines.append(
f" MACD: {macd:.3f} | Signal: {macd_signal:.3f} -> {macd_signal_str}",
f" MACD: {signal.macd:.3f} | Signal: {signal.macd_signal:.3f} -> {macd_signal_str}",
)
lines.append(
f" Long-term Trend: {trend}"
f" Long-term Trend: {signal.trend}"
+ (
f" ({distance_200ma:+.1f}% from 200MA)"
if distance_200ma
else ""
),
)
if signal.notes:
lines.append(f" Notes: {'; '.join(signal.notes)}")
lines.append("")
return _to_text_response("\n".join(lines))
@@ -515,51 +495,29 @@ def analyze_mean_reversion(
lines.append(f"{ticker}: Insufficient price data\n")
continue
df = prices_to_df(prices)
n = len(df)
signal = _technical_analyzer.analyze(ticker, prices_to_df(prices))
deviation = (
((signal.current_price - signal.bollinger_mid) / signal.bollinger_mid)
* 100
if signal.bollinger_mid > 0
else 0
)
# Bollinger Bands
window = min(20, n - 2)
df["SMA"] = df["close"].rolling(window=window).mean()
df["STD"] = df["close"].rolling(window=window).std()
df["Upper_Band"] = df["SMA"] + (2 * df["STD"])
df["Lower_Band"] = df["SMA"] - (2 * df["STD"])
# RSI
delta = df["close"].diff()
gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
rs = gain / loss
df["RSI"] = 100 - (100 / (1 + rs))
current_price = _safe_float(df["close"].iloc[-1])
sma = _safe_float(df["SMA"].iloc[-1])
upper = _safe_float(df["Upper_Band"].iloc[-1])
lower = _safe_float(df["Lower_Band"].iloc[-1])
rsi = _safe_float(df["RSI"].iloc[-1])
deviation = (current_price - sma) / sma * 100
# Signal interpretation
if rsi > 70:
rsi_signal = "OVERBOUGHT"
elif rsi < 30:
rsi_signal = "OVERSOLD"
else:
rsi_signal = "NEUTRAL"
if current_price > upper:
if signal.current_price > signal.bollinger_upper > 0:
bb_signal = "ABOVE UPPER BAND (potential sell)"
elif current_price < lower:
elif 0 < signal.current_price < signal.bollinger_lower:
bb_signal = "BELOW LOWER BAND (potential buy)"
else:
bb_signal = "WITHIN BANDS"
lines.append(f"{ticker}: ${current_price:.2f}")
lines.append(f"{ticker}: ${signal.current_price:.2f}")
lines.append(
f" Bollinger: Lower ${lower:.2f} | SMA ${sma:.2f} | Upper ${upper:.2f}",
f" Bollinger: Lower ${signal.bollinger_lower:.2f} | Mid ${signal.bollinger_mid:.2f} | Upper ${signal.bollinger_upper:.2f}",
)
lines.append(f" Position: {bb_signal}")
lines.append(f" RSI: {rsi:.1f} -> {rsi_signal}")
lines.append(
f" RSI: {signal.rsi14:.1f} -> {signal.mean_reversion_signal}",
)
lines.append(f" Price Deviation from SMA: {deviation:+.1f}%")
lines.append("")
@@ -602,61 +560,30 @@ def analyze_momentum(
lines.append(f"{ticker}: Insufficient price data\n")
continue
df = prices_to_df(prices)
n = len(df)
df["returns"] = df["close"].pct_change()
signal = _technical_analyzer.analyze(ticker, prices_to_df(prices))
# Adaptive periods
short_p = min(5, n // 3)
med_p = min(10, n // 2)
long_p = min(20, n - 2)
current_price = _safe_float(df["close"].iloc[-1])
mom_5 = (
_safe_float(
(df["close"].iloc[-1] / df["close"].iloc[-short_p - 1] - 1)
* 100,
)
if n > short_p
else 0
)
mom_10 = (
_safe_float(
(df["close"].iloc[-1] / df["close"].iloc[-med_p - 1] - 1)
* 100,
)
if n > med_p
else 0
)
mom_20 = (
_safe_float(
(df["close"].iloc[-1] / df["close"].iloc[-long_p - 1] - 1)
* 100,
)
if n > long_p
else 0
)
volatility = _safe_float(
df["returns"].tail(20).std() * np.sqrt(252) * 100,
)
# Overall momentum signal
avg_mom = (mom_5 + mom_10 + mom_20) / 3
avg_mom = (
signal.momentum_5d_pct
+ signal.momentum_10d_pct
+ signal.momentum_20d_pct
) / 3
if avg_mom > 2:
signal = "STRONG BULLISH"
signal_text = "STRONG BULLISH"
elif avg_mom > 0:
signal = "BULLISH"
signal_text = "BULLISH"
elif avg_mom > -2:
signal = "BEARISH"
signal_text = "BEARISH"
else:
signal = "STRONG BEARISH"
signal_text = "STRONG BEARISH"
lines.append(f"{ticker}: ${current_price:.2f}")
lines.append(f"{ticker}: ${signal.current_price:.2f}")
lines.append(
f" 5-day: {mom_5:+.1f}% | 10-day: {mom_10:+.1f}% | 20-day: {mom_20:+.1f}%",
f" 5-day: {signal.momentum_5d_pct:+.1f}% | 10-day: {signal.momentum_10d_pct:+.1f}% | 20-day: {signal.momentum_20d_pct:+.1f}%",
)
lines.append(f" Volatility (annualized): {volatility:.1f}%")
lines.append(f" Overall: {signal}")
lines.append(
f" Volatility (annualized): {signal.annualized_volatility_pct:.1f}%",
)
lines.append(f" Overall: {signal_text}")
lines.append("")
return _to_text_response("\n".join(lines))
@@ -699,38 +626,26 @@ def analyze_volatility(
continue
df = prices_to_df(prices)
n = len(df)
df["returns"] = df["close"].pct_change()
# Adaptive windows
short_w = min(10, n // 2)
med_w = min(20, n - 2)
long_w = min(60, n - 1) if n > 30 else med_w
current_price = _safe_float(df["close"].iloc[-1])
signal = _technical_analyzer.analyze(ticker, df)
returns = df["close"].pct_change()
short_w = min(10, max(1, len(df) - 1))
med_w = min(20, max(1, len(df) - 1))
long_w = min(60, max(1, len(df) - 1))
vol_10 = _safe_float(
df["returns"].tail(short_w).std() * np.sqrt(252) * 100,
returns.tail(short_w).std() * np.sqrt(252) * 100,
)
vol_20 = _safe_float(
df["returns"].tail(med_w).std() * np.sqrt(252) * 100,
returns.tail(med_w).std() * np.sqrt(252) * 100,
)
vol_60 = _safe_float(
df["returns"].tail(long_w).std() * np.sqrt(252) * 100,
returns.tail(long_w).std() * np.sqrt(252) * 100,
)
# Risk assessment
if vol_20 > 50:
risk = "HIGH RISK"
elif vol_20 > 25:
risk = "MODERATE RISK"
else:
risk = "LOW RISK"
lines.append(f"{ticker}: ${current_price:.2f}")
lines.append(f"{ticker}: ${signal.current_price:.2f}")
lines.append(
f" 10-day Vol: {vol_10:.1f}% | 20-day Vol: {vol_20:.1f}% | 60-day Vol: {vol_60:.1f}%",
)
lines.append(f" Risk Level: {risk}")
lines.append(f" Risk Level: {signal.risk_level}")
lines.append("")
return _to_text_response("\n".join(lines))

View File

@@ -1,43 +1,26 @@
# -*- coding: utf-8 -*-
# flake8: noqa: E501
# pylint: disable=C0301
"""
Data fetching tools for financial data.
All functions use centralized data source configuration from data_config.py.
The data source is automatically determined based on available API keys:
- Priority: FINNHUB_API_KEY > FINANCIAL_DATASETS_API_KEY
"""
"""Data fetching tools backed by the unified provider router."""
import datetime
import time
import finnhub
import pandas as pd
import pandas_market_calendars as mcal
import requests
from backend.data.provider_utils import normalize_symbol
from backend.config.data_config import (
get_config,
get_api_key,
)
from backend.data.cache import get_cache
from backend.data.provider_router import get_provider_router
from backend.data.schema import (
CompanyFactsResponse,
CompanyNews,
CompanyNewsResponse,
FinancialMetrics,
FinancialMetricsResponse,
InsiderTrade,
InsiderTradeResponse,
LineItem,
LineItemResponse,
Price,
PriceResponse,
)
from backend.utils.settlement import logger
# Global cache instance
_cache = get_cache()
_router = get_provider_router()
def get_last_tradeday(date: str) -> str:
@@ -94,48 +77,6 @@ def get_last_tradeday(date: str) -> str:
return prev_date.strftime("%Y-%m-%d")
def _make_api_request(
url: str,
headers: dict,
method: str = "GET",
json_data: dict = None,
max_retries: int = 3,
) -> requests.Response:
"""
Make an API request with rate limiting handling and moderate backoff.
Args:
url: The URL to request
headers: Headers to include in the request
method: HTTP method (GET or POST)
json_data: JSON data for POST requests
max_retries: Maximum number of retries (default: 3)
Returns:
requests.Response: The response object
Raises:
Exception: If the request fails with a non-429 error
"""
for attempt in range(max_retries + 1): # +1 for initial attempt
if method.upper() == "POST":
response = requests.post(url, headers=headers, json=json_data)
else:
response = requests.get(url, headers=headers)
if response.status_code == 429 and attempt < max_retries:
# Linear backoff: 60s, 90s, 120s, 150s...
delay = 60 + (30 * attempt)
print(
f"Rate limited (429). Attempt {attempt + 1}/{max_retries + 1}. Waiting {delay}s before retrying...",
)
time.sleep(delay)
continue
# Return the response (whether success, other errors, or final 429)
return response
def get_prices(
ticker: str,
start_date: str,
@@ -154,75 +95,19 @@ def get_prices(
Returns:
list[Price]: List of Price objects
"""
config = get_config()
data_source = config.source
api_key = config.api_key
# Create a cache key that includes all parameters to ensure exact matches
cache_key = f"{ticker}_{start_date}_{end_date}_{data_source}"
# Check cache first - simple exact match
ticker = normalize_symbol(ticker)
cached_sources = _router.price_sources()
for source in cached_sources:
cache_key = f"{ticker}_{start_date}_{end_date}_{source}"
if cached_data := _cache.get_prices(cache_key):
return [Price(**price) for price in cached_data]
prices = []
if data_source == "finnhub":
# Use Finnhub API
client = finnhub.Client(api_key=api_key)
# Convert dates to timestamps
start_timestamp = int(
datetime.datetime.strptime(start_date, "%Y-%m-%d").timestamp(),
)
end_timestamp = int(
(
datetime.datetime.strptime(end_date, "%Y-%m-%d")
+ datetime.timedelta(days=1)
).timestamp(),
)
# Fetch candle data from Finnhub
candles = client.stock_candles(
ticker,
"D",
start_timestamp,
end_timestamp,
)
# Convert to Price objects
for i in range(len(candles["t"])):
price = Price(
open=candles["o"][i],
close=candles["c"][i],
high=candles["h"][i],
low=candles["l"][i],
volume=int(candles["v"][i]),
time=datetime.datetime.fromtimestamp(candles["t"][i]).strftime(
"%Y-%m-%d",
),
)
prices.append(price)
else: # financial_datasets
# Use Financial Datasets API
headers = {"X-API-KEY": api_key}
url = f"https://api.financialdatasets.ai/prices/?ticker={ticker}&interval=day&interval_multiplier=1&start_date={start_date}&end_date={end_date}"
response = _make_api_request(url, headers)
if response.status_code != 200:
raise ValueError(
f"Error fetching data: {ticker} - {response.status_code} - {response.text}",
)
# Parse response with Pydantic model
price_response = PriceResponse(**response.json())
prices = price_response.prices
prices, data_source = _router.get_prices(ticker, start_date, end_date)
if not prices:
return []
# Cache the results using the comprehensive cache key
cache_key = f"{ticker}_{start_date}_{end_date}_{data_source}"
_cache.set_prices(cache_key, [p.model_dump() for p in prices])
return prices
@@ -247,119 +132,29 @@ def get_financial_metrics(
Returns:
list[FinancialMetrics]: List of financial metrics
"""
config = get_config()
data_source = config.source
api_key = config.api_key
# Create a cache key that includes all parameters to ensure exact matches
cache_key = f"{ticker}_{period}_{end_date}_{limit}_{data_source}"
# Check cache first - simple exact match
ticker = normalize_symbol(ticker)
for source in _router.api_sources():
cache_key = f"{ticker}_{period}_{end_date}_{limit}_{source}"
if cached_data := _cache.get_financial_metrics(cache_key):
return [FinancialMetrics(**metric) for metric in cached_data]
financial_metrics = []
if data_source == "finnhub":
# Use Finnhub API - Basic Financials
client = finnhub.Client(api_key=api_key)
# Fetch basic financials from Finnhub
# metric='all' returns all available metrics
financials = client.company_basic_financials(ticker, "all")
if not financials or "metric" not in financials:
return []
# Finnhub returns {series: {...}, metric: {...}, metricType: ..., symbol: ...}
# We need to create a FinancialMetrics object from this
metric_data = financials.get("metric", {})
# Create a FinancialMetrics object with available data
metric = _map_finnhub_metrics(ticker, end_date, period, metric_data)
financial_metrics = [metric]
else: # financial_datasets
# Use Financial Datasets API
headers = {"X-API-KEY": api_key}
url = f"https://api.financialdatasets.ai/financial-metrics/?ticker={ticker}&report_period_lte={end_date}&limit={limit}&period={period}"
response = _make_api_request(url, headers)
if response.status_code != 200:
raise ValueError(
f"Error fetching data: {ticker} - {response.status_code} - {response.text}",
financial_metrics, data_source = _router.get_financial_metrics(
ticker=ticker,
end_date=end_date,
period=period,
limit=limit,
)
# Parse response with Pydantic model
metrics_response = FinancialMetricsResponse(**response.json())
financial_metrics = metrics_response.financial_metrics
if not financial_metrics:
return []
# Cache the results as dicts using the comprehensive cache key
cache_key = f"{ticker}_{period}_{end_date}_{limit}_{data_source}"
_cache.set_financial_metrics(
cache_key,
[m.model_dump() for m in financial_metrics],
)
return financial_metrics
def _map_finnhub_metrics(
    ticker: str,
    end_date: str,
    period: str,
    metric_data: dict,
) -> FinancialMetrics:
    """Map Finnhub basic-financials metric data onto the FinancialMetrics model.

    Fields that Finnhub does not expose are filled with None so the resulting
    object has the same shape regardless of provider.
    """
    # FinancialMetrics field -> Finnhub metric key, for every field Finnhub covers.
    field_to_finnhub_key = {
        "market_cap": "marketCapitalization",
        "price_to_earnings_ratio": "peBasicExclExtraTTM",
        "price_to_book_ratio": "pbAnnual",
        "price_to_sales_ratio": "psAnnual",
        "gross_margin": "grossMarginTTM",
        "operating_margin": "operatingMarginTTM",
        "net_margin": "netProfitMarginTTM",
        "return_on_equity": "roeTTM",
        "return_on_assets": "roaTTM",
        "return_on_invested_capital": "roicTTM",
        "asset_turnover": "assetTurnoverTTM",
        "inventory_turnover": "inventoryTurnoverTTM",
        "receivables_turnover": "receivablesTurnoverTTM",
        "current_ratio": "currentRatioAnnual",
        "quick_ratio": "quickRatioAnnual",
        "debt_to_equity": "totalDebt/totalEquityAnnual",
        "revenue_growth": "revenueGrowthTTMYoy",
        "earnings_per_share_growth": "epsGrowthTTMYoy",
        "payout_ratio": "payoutRatioAnnual",
        "earnings_per_share": "epsBasicExclExtraItemsTTM",
        "book_value_per_share": "bookValuePerShareAnnual",
    }
    # Fields with no Finnhub equivalent; set explicitly to None.
    unavailable_fields = (
        "enterprise_value",
        "enterprise_value_to_ebitda_ratio",
        "enterprise_value_to_revenue_ratio",
        "free_cash_flow_yield",
        "peg_ratio",
        "days_sales_outstanding",
        "operating_cycle",
        "working_capital_turnover",
        "cash_ratio",
        "operating_cash_flow_ratio",
        "debt_to_assets",
        "interest_coverage",
        "earnings_growth",
        "book_value_growth",
        "free_cash_flow_growth",
        "operating_income_growth",
        "ebitda_growth",
        "free_cash_flow_per_share",
    )

    values = {name: None for name in unavailable_fields}
    values.update(
        {name: metric_data.get(key) for name, key in field_to_finnhub_key.items()},
    )
    return FinancialMetrics(
        ticker=ticker,
        report_period=end_date,
        period=period,
        currency="USD",
        **values,
    )
def search_line_items(
ticker: str,
line_items: list[str],
@@ -373,123 +168,20 @@ def search_line_items(
Returns empty list on API errors to allow graceful degradation.
"""
try:
api_key = get_api_key()
headers = {"X-API-KEY": api_key}
url = "https://api.financialdatasets.ai/financials/search/line-items"
body = {
"tickers": [ticker],
"line_items": line_items,
"end_date": end_date,
"period": period,
"limit": limit,
}
response = _make_api_request(
url,
headers,
method="POST",
json_data=body,
ticker = normalize_symbol(ticker)
return _router.search_line_items(
ticker=ticker,
line_items=line_items,
end_date=end_date,
period=period,
limit=limit,
)
if response.status_code != 200:
logger.info(
f"Warning: Failed to fetch line items for {ticker}: "
f"{response.status_code} - {response.text}",
)
return []
data = response.json()
response_model = LineItemResponse(**data)
search_results = response_model.search_results
if not search_results:
return []
return search_results[:limit]
except Exception as e:
logger.info(
f"Warning: Exception while fetching line items for {ticker}: {str(e)}",
)
return []
def _fetch_finnhub_insider_trades(
    ticker: str,
    start_date: str | None,
    end_date: str,
    limit: int,
    api_key: str,
) -> list[InsiderTrade]:
    """Fetch insider trades from Finnhub API."""
    if start_date:
        from_date = start_date
    else:
        # Default window: one year back from the end date.
        window_start = datetime.datetime.strptime(
            end_date,
            "%Y-%m-%d",
        ) - datetime.timedelta(days=365)
        from_date = window_start.strftime("%Y-%m-%d")

    client = finnhub.Client(api_key=api_key)
    payload = client.stock_insider_transactions(ticker, from_date, end_date)
    if not payload or "data" not in payload:
        return []

    trades: list[InsiderTrade] = []
    for raw_trade in payload["data"][:limit]:
        trades.append(_convert_finnhub_insider_trade(ticker, raw_trade))
    return trades
def _fetch_fd_insider_trades(
    ticker: str,
    start_date: str | None,
    end_date: str,
    limit: int,
    api_key: str,
) -> list[InsiderTrade]:
    """Fetch insider trades from Financial Datasets API.

    Pages backwards in time: each request asks for up to ``limit`` trades
    filed on or before ``current_end_date``; the next page uses the oldest
    filing date seen on this page as its new upper bound.

    Args:
        ticker: Symbol to query.
        start_date: Inclusive lower bound on filing date; when falsy, only a
            single page is fetched.
        end_date: Inclusive upper bound on filing date for the first page.
        limit: Maximum trades requested per page.
        api_key: Financial Datasets API key.

    Returns:
        list[InsiderTrade]: Trades gathered across pages. Because the upper
        bound is inclusive on every page, filings sharing the boundary date
        may appear twice.

    Raises:
        ValueError: On any non-200 API response.
    """
    headers = {"X-API-KEY": api_key}
    all_trades = []
    current_end_date = end_date
    while True:
        url = f"https://api.financialdatasets.ai/insider-trades/?ticker={ticker}&filing_date_lte={current_end_date}"
        if start_date:
            url += f"&filing_date_gte={start_date}"
        url += f"&limit={limit}"
        response = _make_api_request(url, headers)
        if response.status_code != 200:
            raise ValueError(
                f"Error fetching data: {ticker} - {response.status_code} - {response.text}",
            )
        data = response.json()
        response_model = InsiderTradeResponse(**data)
        insider_trades = response_model.insider_trades
        if not insider_trades:
            break
        all_trades.extend(insider_trades)
        # A short page, or no lower bound requested, means nothing further back.
        if not start_date or len(insider_trades) < limit:
            break
        # Continue from the oldest filing date on this page (date part only).
        current_end_date = min(
            trade.filing_date for trade in insider_trades
        ).split("T")[0]
        if current_end_date <= start_date:
            break
    return all_trades
def get_insider_trades(
ticker: str,
end_date: str,
@@ -497,133 +189,28 @@ def get_insider_trades(
limit: int = 1000,
) -> list[InsiderTrade]:
"""Fetch insider trades from cache or API."""
config = get_config()
data_source = config.source
api_key = config.api_key
ticker = normalize_symbol(ticker)
for source in _router.api_sources():
cache_key = (
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
)
if cached_data := _cache.get_insider_trades(cache_key):
return [InsiderTrade(**trade) for trade in cached_data]
if data_source == "finnhub":
all_trades = _fetch_finnhub_insider_trades(
ticker,
start_date,
end_date,
limit,
api_key,
)
else:
all_trades = _fetch_fd_insider_trades(
ticker,
start_date,
end_date,
limit,
api_key,
all_trades, data_source = _router.get_insider_trades(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
if not all_trades:
return []
_cache.set_insider_trades(
cache_key,
[trade.model_dump() for trade in all_trades],
)
cache_key = f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
_cache.set_insider_trades(cache_key, [trade.model_dump() for trade in all_trades])
return all_trades
def _fetch_finnhub_company_news(
    ticker: str,
    start_date: str | None,
    end_date: str,
    limit: int,
    api_key: str,
) -> list[CompanyNews]:
    """Fetch company news from Finnhub API."""
    if start_date:
        from_date = start_date
    else:
        # Default window: 30 days back from the end date.
        window_start = datetime.datetime.strptime(
            end_date,
            "%Y-%m-%d",
        ) - datetime.timedelta(days=30)
        from_date = window_start.strftime("%Y-%m-%d")

    client = finnhub.Client(api_key=api_key)
    raw_items = client.company_news(ticker, _from=from_date, to=end_date)
    if not raw_items:
        return []

    collected: list[CompanyNews] = []
    for item in raw_items[:limit]:
        timestamp = item.get("datetime")
        if timestamp:
            published = datetime.datetime.fromtimestamp(
                timestamp,
                datetime.timezone.utc,
            ).strftime("%Y-%m-%d")
        else:
            published = None
        collected.append(
            CompanyNews(
                ticker=ticker,
                title=item.get("headline", ""),
                related=item.get("related", ""),
                source=item.get("source", ""),
                date=published,
                url=item.get("url", ""),
                summary=item.get("summary", ""),
                category=item.get("category", ""),
            ),
        )
    return collected
def _fetch_fd_company_news(
    ticker: str,
    start_date: str | None,
    end_date: str,
    limit: int,
    api_key: str,
) -> list[CompanyNews]:
    """Fetch company news from Financial Datasets API.

    Pages backwards in time: each request asks for up to ``limit`` articles
    published on or before ``current_end_date``; the next page uses the
    oldest article date seen on this page as its new upper bound.

    Args:
        ticker: Symbol to query.
        start_date: Inclusive lower bound on article date; when falsy, only a
            single page is fetched.
        end_date: Inclusive upper bound on article date for the first page.
        limit: Maximum articles requested per page.
        api_key: Financial Datasets API key.

    Returns:
        list[CompanyNews]: Articles gathered across pages.

    Raises:
        ValueError: On any non-200 API response.
    """
    headers = {"X-API-KEY": api_key}
    all_news = []
    current_end_date = end_date
    while True:
        url = f"https://api.financialdatasets.ai/news/?ticker={ticker}&end_date={current_end_date}"
        if start_date:
            url += f"&start_date={start_date}"
        url += f"&limit={limit}"
        response = _make_api_request(url, headers)
        if response.status_code != 200:
            raise ValueError(
                f"Error fetching data: {ticker} - {response.status_code} - {response.text}",
            )
        data = response.json()
        response_model = CompanyNewsResponse(**data)
        company_news = response_model.news
        if not company_news:
            break
        all_news.extend(company_news)
        # A short page, or no lower bound requested, means nothing further back.
        if not start_date or len(company_news) < limit:
            break
        # Continue from the oldest dated article on this page (date part only).
        # NOTE(review): if every item on a page had date=None this min() would
        # raise ValueError on an empty sequence — confirm dates are always set.
        current_end_date = min(
            news.date for news in company_news if news.date is not None
        ).split("T")[0]
        if current_end_date <= start_date:
            break
    return all_news
def get_company_news(
ticker: str,
end_date: str,
@@ -631,102 +218,49 @@ def get_company_news(
limit: int = 1000,
) -> list[CompanyNews]:
"""Fetch company news from cache or API."""
config = get_config()
data_source = config.source
api_key = config.api_key
ticker = normalize_symbol(ticker)
for source in _router.api_sources():
cache_key = (
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
)
if cached_data := _cache.get_company_news(cache_key):
return [CompanyNews(**news) for news in cached_data]
if data_source == "finnhub":
all_news = _fetch_finnhub_company_news(
ticker,
start_date,
end_date,
limit,
api_key,
)
else:
all_news = _fetch_fd_company_news(
ticker,
start_date,
end_date,
limit,
api_key,
all_news, data_source = _router.get_company_news(
ticker=ticker,
end_date=end_date,
start_date=start_date,
limit=limit,
)
if not all_news:
return []
_cache.set_company_news(
cache_key,
[news.model_dump() for news in all_news],
)
cache_key = f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
_cache.set_company_news(cache_key, [news.model_dump() for news in all_news])
return all_news
def _convert_finnhub_insider_trade(ticker: str, trade: dict) -> InsiderTrade:
    """Convert a raw Finnhub insider-transaction dict to an InsiderTrade.

    Finnhub reports the post-transaction share count (``share``) and the
    signed change (``change``: positive = acquisition, negative = disposal),
    so the pre-transaction count is reconstructed as ``share - change``.
    """
    shares_after = trade.get("share", 0)
    change = trade.get("change", 0)
    price = trade.get("transactionPrice", 0.0)
    return InsiderTrade(
        ticker=ticker,
        issuer=None,
        name=trade.get("name", ""),
        title=None,
        is_board_director=None,
        transaction_date=trade.get("transactionDate", ""),
        transaction_shares=abs(change),
        transaction_price_per_share=price,
        transaction_value=abs(change) * price,
        # Bug fix: the previous truthiness check (`if shares_after and change`)
        # dropped legitimate values whenever either was 0 — e.g. a position
        # sold out completely (share == 0) or a zero-change filing.
        shares_owned_before_transaction=(
            shares_after - change
            if shares_after is not None and change is not None
            else None
        ),
        shares_owned_after_transaction=(
            float(shares_after) if shares_after is not None else None
        ),
        security_title=None,
        filing_date=trade.get("filingDate", ""),
    )
def get_market_cap(ticker: str, end_date: str) -> float | None:
"""Fetch market cap from the API. Finnhub values are converted from millions."""
config = get_config()
data_source = config.source
api_key = config.api_key
ticker = normalize_symbol(ticker)
# For today's date, use company facts API
if end_date == datetime.datetime.now().strftime("%Y-%m-%d"):
headers = {"X-API-KEY": api_key}
url = (
f"https://api.financialdatasets.ai/company/facts/?ticker={ticker}"
def _metrics_lookup(symbol: str, date: str):
for source in _router.api_sources():
cache_key = f"{symbol}_ttm_{date}_10_{source}"
if cached_data := _cache.get_financial_metrics(cache_key):
return [FinancialMetrics(**metric) for metric in cached_data], source
return _router.get_financial_metrics(
ticker=symbol,
end_date=date,
period="ttm",
limit=10,
)
response = _make_api_request(url, headers)
if response.status_code != 200:
return None
data = response.json()
response_model = CompanyFactsResponse(**data)
return response_model.company_facts.market_cap
financial_metrics = get_financial_metrics(ticker, end_date)
if not financial_metrics:
return None
market_cap = financial_metrics[0].market_cap
if not market_cap:
return None
# Finnhub returns market cap in millions
if data_source == "finnhub":
market_cap = market_cap * 1_000_000
market_cap, _ = _router.get_market_cap(
ticker=ticker,
end_date=end_date,
metrics_lookup=_metrics_lookup,
)
return market_cap

View File

@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
"""Structured technical signal analysis used by technical tools."""
from dataclasses import dataclass, field
from typing import Dict, List, Optional
import pandas as pd
@dataclass
class TechnicalSignal:
    """Structured technical analysis result for one ticker.

    Numeric fields default to neutral values (0.0, or 50.0 for RSI) so a
    signal object can be returned even when history is too short to compute
    every indicator.
    """

    ticker: str
    # Latest close price from the analyzed frame.
    current_price: float = 0.0
    # Simple moving averages of the close over the named windows.
    ma5: float = 0.0
    ma10: float = 0.0
    ma20: float = 0.0
    ma50: float = 0.0
    # MA200 needs deep history; None when unavailable.
    ma200: Optional[float] = None
    # Percent gap between the latest close and MA5.
    bias_ma5_pct: float = 0.0
    # Percent returns over the trailing 5/10/20 bars.
    momentum_5d_pct: float = 0.0
    momentum_10d_pct: float = 0.0
    momentum_20d_pct: float = 0.0
    # Std dev of the last 20 returns, annualized via sqrt(252), in percent.
    annualized_volatility_pct: float = 0.0
    # 14-period RSI; neutral 50.0 when not computable.
    rsi14: float = 50.0
    # MACD (EMA12 - EMA26) and its 9-period signal line.
    macd: float = 0.0
    macd_signal: float = 0.0
    # 20-period Bollinger bands (mid +/- 2 standard deviations).
    bollinger_upper: float = 0.0
    bollinger_mid: float = 0.0
    bollinger_lower: float = 0.0
    # Categorical labels derived from the numbers above.
    trend: str = "NEUTRAL"
    mean_reversion_signal: str = "NEUTRAL"
    risk_level: str = "MODERATE RISK"
    # Short human-readable observations (e.g. "Price extended from MA5").
    notes: List[str] = field(default_factory=list)

    def to_summary(self) -> Dict[str, object]:
        """Compact dict for logs/tests."""
        return {
            "ticker": self.ticker,
            "trend": self.trend,
            "mean_reversion_signal": self.mean_reversion_signal,
            "risk_level": self.risk_level,
            "current_price": self.current_price,
            "rsi14": self.rsi14,
            "annualized_volatility_pct": self.annualized_volatility_pct,
        }
class StockTechnicalAnalyzer:
    """Lightweight technical analyzer adapted for EvoTraders tools.

    Computes moving averages, MACD, RSI(14) and 20-period Bollinger bands
    from a price DataFrame and distills them into a TechnicalSignal.
    """

    def analyze(self, ticker: str, df: pd.DataFrame) -> TechnicalSignal:
        """Analyze one ticker from OHLC price history.

        Args:
            ticker: Symbol the frame belongs to.
            df: Price history; must contain "time" and "close" columns.
                Assumed to hold one row per trading bar — verify with caller.

        Returns:
            TechnicalSignal: Populated signal, or a mostly-default object
            carrying an "Insufficient price data" note when fewer than 5
            rows are supplied.
        """
        result = TechnicalSignal(ticker=ticker)
        if df is None or df.empty or len(df) < 5:
            result.notes.append("Insufficient price data")
            return result
        # Work on a chronologically sorted copy; never mutate the caller's frame.
        frame = df.sort_values("time").reset_index(drop=True).copy()
        frame["close"] = pd.to_numeric(frame["close"], errors="coerce")
        frame["returns"] = frame["close"].pct_change()
        # Simple moving averages over the standard windows.
        for window in (5, 10, 20, 50, 200):
            frame[f"MA_{window}"] = frame["close"].rolling(window).mean()
        # MACD: fast/slow EMA spread plus a 9-period signal line.
        frame["EMA_12"] = frame["close"].ewm(span=12, adjust=False).mean()
        frame["EMA_26"] = frame["close"].ewm(span=26, adjust=False).mean()
        frame["MACD"] = frame["EMA_12"] - frame["EMA_26"]
        frame["MACD_SIGNAL"] = (
            frame["MACD"].ewm(span=9, adjust=False).mean()
        )
        # RSI(14) using plain rolling means of gains/losses; a zero average
        # loss is mapped to NA so the division cannot blow up.
        delta = frame["close"].diff()
        gain = delta.where(delta > 0, 0.0)
        loss = -delta.where(delta < 0, 0.0)
        avg_gain = gain.rolling(14).mean()
        avg_loss = loss.rolling(14).mean()
        rs = avg_gain / avg_loss.replace(0, pd.NA)
        frame["RSI_14"] = 100 - (100 / (1 + rs))
        # 20-period Bollinger bands at +/- 2 standard deviations.
        frame["BB_MID"] = frame["close"].rolling(20).mean()
        frame["BB_STD"] = frame["close"].rolling(20).std()
        frame["BB_UPPER"] = frame["BB_MID"] + 2 * frame["BB_STD"]
        frame["BB_LOWER"] = frame["BB_MID"] - 2 * frame["BB_STD"]
        # Read indicators off the most recent row; _safe_number coerces NaN
        # (e.g. from short history) to harmless defaults.
        latest = frame.iloc[-1]
        result.current_price = _safe_number(latest["close"])
        result.ma5 = _safe_number(latest["MA_5"])
        result.ma10 = _safe_number(latest["MA_10"])
        result.ma20 = _safe_number(latest["MA_20"])
        result.ma50 = _safe_number(latest["MA_50"])
        result.ma200 = _safe_optional(latest["MA_200"])
        result.bias_ma5_pct = _percent_gap(result.current_price, result.ma5)
        result.momentum_5d_pct = _lookback_return(frame["close"], 5)
        result.momentum_10d_pct = _lookback_return(frame["close"], 10)
        result.momentum_20d_pct = _lookback_return(frame["close"], 20)
        # Annualize the 20-bar return std dev (sqrt of 252 trading days), in percent.
        result.annualized_volatility_pct = _safe_number(
            frame["returns"].tail(20).std() * (252**0.5) * 100,
        )
        result.rsi14 = _safe_number(latest["RSI_14"], default=50.0)
        result.macd = _safe_number(latest["MACD"])
        result.macd_signal = _safe_number(latest["MACD_SIGNAL"])
        result.bollinger_mid = _safe_number(latest["BB_MID"])
        result.bollinger_upper = _safe_number(latest["BB_UPPER"])
        result.bollinger_lower = _safe_number(latest["BB_LOWER"])
        # Derive the categorical labels and notes from the raw numbers.
        result.trend = _classify_trend(result)
        result.mean_reversion_signal = _classify_mean_reversion(result)
        result.risk_level = _classify_risk(result.annualized_volatility_pct)
        result.notes = _build_notes(result)
        return result
def _safe_number(value, default: float = 0.0) -> float:
try:
if pd.isna(value):
return default
return float(value)
except (TypeError, ValueError):
return default
def _safe_optional(value) -> Optional[float]:
try:
if pd.isna(value):
return None
return float(value)
except (TypeError, ValueError):
return None
def _lookback_return(series: pd.Series, lookback: int) -> float:
if len(series) <= lookback:
return 0.0
base = _safe_number(series.iloc[-lookback - 1])
latest = _safe_number(series.iloc[-1])
if base <= 0:
return 0.0
return ((latest / base) - 1) * 100
def _percent_gap(value: float, anchor: float) -> float:
if anchor <= 0:
return 0.0
return ((value - anchor) / anchor) * 100
def _classify_trend(result: TechnicalSignal) -> str:
bullish_stack = (
result.current_price >= result.ma5 >= result.ma10 >= result.ma20 > 0
)
if bullish_stack and result.macd >= result.macd_signal:
return "STRONG BULLISH"
if bullish_stack:
return "BULLISH"
if result.current_price < result.ma20 and result.macd < result.macd_signal:
return "BEARISH"
return "NEUTRAL"
def _classify_mean_reversion(result: TechnicalSignal) -> str:
if result.rsi14 <= 30 or (
result.bollinger_lower > 0
and result.current_price <= result.bollinger_lower
):
return "OVERSOLD"
if result.rsi14 >= 70 or (
result.bollinger_upper > 0
and result.current_price >= result.bollinger_upper
):
return "OVERBOUGHT"
return "NEUTRAL"
def _classify_risk(volatility_pct: float) -> str:
if volatility_pct > 50:
return "HIGH RISK"
if volatility_pct > 25:
return "MODERATE RISK"
return "LOW RISK"
def _build_notes(result: TechnicalSignal) -> List[str]:
notes = []
if abs(result.bias_ma5_pct) > 5:
notes.append("Price extended from MA5")
if result.macd > result.macd_signal:
notes.append("MACD supports upside momentum")
if result.mean_reversion_signal == "OVERSOLD":
notes.append("Potential rebound setup")
if result.mean_reversion_signal == "OVERBOUGHT":
notes.append("Potential pullback setup")
return notes

View File

@@ -38,6 +38,7 @@ class TerminalDashboard:
self.end_date = ""
self.tickers: List[str] = []
self.initial_cash = 100000.0
self.data_sources: Dict[str, Any] = {}
# Trading state
self.current_date = "-"
@@ -72,6 +73,7 @@ class TerminalDashboard:
end_date: str = "",
tickers: List[str] = None,
initial_cash: float = 100000.0,
data_sources: Dict[str, Any] = None,
):
"""Set configuration state"""
self.mode = mode
@@ -88,6 +90,7 @@ class TerminalDashboard:
self.end_date = end_date
self.tickers = tickers or []
self.initial_cash = initial_cash
self.data_sources = data_sources or {}
self.total_value = initial_cash
self.cash = initial_cash
@@ -114,6 +117,11 @@ class TerminalDashboard:
left.add_row(f"[bold]Mode:[/bold] {mode_str}")
left.add_row(f"[dim]Config:[/dim] {self.config_name}")
left.add_row(f"[dim]Server:[/dim] {self.host}:{self.port}")
preferred_sources = self.data_sources.get("preferred", [])
if preferred_sources:
left.add_row(
f"[dim]Data:[/dim] {' -> '.join(preferred_sources)}",
)
if self.mode == "live" and self.nyse_time:
left.add_row(f"[dim]NYSE:[/dim] {self.nyse_time[:19]}")
@@ -265,6 +273,7 @@ class TerminalDashboard:
trades: List[Dict] = None,
days_completed: int = None,
days_total: int = None,
data_sources: Dict[str, Any] = None,
):
"""Update dashboard state and refresh display"""
if date:
@@ -297,6 +306,8 @@ class TerminalDashboard:
self.holdings = holdings
if trades is not None:
self.trades = trades
if data_sources is not None:
self.data_sources = data_sources
if self.live:
self.live.update(self._build_panel())

View File

@@ -0,0 +1,42 @@
# HTTP vhost: serve ACME challenge files and redirect everything else to HTTPS.
server {
    listen 80;
    server_name evotraders.cillinn.com;

    # Let's Encrypt HTTP-01 challenge files, served from the deployed site root.
    location /.well-known/acme-challenge/ {
        root /var/www/evotraders/current;
        allow all;
    }

    # Permanent redirect to the HTTPS vhost below.
    location / {
        return 301 https://$host$request_uri;
    }
}

# HTTPS vhost: static frontend plus WebSocket proxy to the backend on :8765.
server {
    listen 443 ssl http2;
    server_name evotraders.cillinn.com;

    root /var/www/evotraders/current;
    index index.html;

    # Certificates and hardening parameters managed by certbot.
    ssl_certificate /etc/letsencrypt/live/evotraders.cillinn.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/evotraders.cillinn.com/privkey.pem;
    include /etc/letsencrypt/options-ssl-nginx.conf;
    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;

    # WebSocket endpoint: upgrade the connection and forward client metadata.
    location /ws {
        proxy_pass http://127.0.0.1:8765;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        # Keep long-lived streams open (idle read timeout).
        proxy_read_timeout 300s;
    }

    # SPA fallback: unknown paths resolve to index.html for client-side routing.
    location / {
        try_files $uri $uri/ /index.html;
    }
}

View File

@@ -0,0 +1,15 @@
# Plain-HTTP vhost (pre-TLS bootstrap): serves the static build directly and
# exposes the ACME challenge path so a certificate can be issued.
server {
    listen 80;
    server_name evotraders.cillinn.com;

    root /var/www/evotraders/current;
    index index.html;

    # Let's Encrypt HTTP-01 challenge files (resolved against the root above).
    location /.well-known/acme-challenge/ {
        allow all;
    }

    # SPA fallback: unknown paths resolve to index.html.
    location / {
        try_files $uri $uri/ /index.html;
    }
}

View File

@@ -0,0 +1,14 @@
# Systemd unit for the EvoTraders backend: started after the network is up
# and restarted automatically whenever the process exits.
[Unit]
Description=EvoTraders Production Service
After=network.target

[Service]
Type=simple
WorkingDirectory=/root/code/evotraders
ExecStart=/root/code/evotraders/scripts/run_prod.sh
# Always restart, waiting 5 seconds between attempts.
Restart=always
RestartSec=5
# Unbuffered stdout/stderr so logs reach journald immediately.
Environment=PYTHONUNBUFFERED=1

[Install]
WantedBy=multi-user.target

View File

@@ -9,7 +9,8 @@ TICKERS=AAPL,MSFT,GOOGL,NVDA,TSLA,META,AMZN
# finnhub: https://finnhub.io/register
# financial datasets: https://www.financialdatasets.ai/
FIN_DATA_SOURCE = #finnhub or financial_datasets | finnhub 或 financial_datasets
FIN_DATA_SOURCE = # Preferred source: finnhub / financial_datasets / yfinance / local_csv | 首选数据源
ENABLED_DATA_SOURCES = # Optional allowlist, comma-separated, e.g. yfinance,finnhub,financial_datasets,local_csv | 可启用数据源列表
FINANCIAL_DATASETS_API_KEY= #required | 必填
FINNHUB_API_KEY= #optional | 可选

View File

@@ -83,6 +83,7 @@ export default function LiveTradingApp() {
const [serverMode, setServerMode] = useState(null); // 'live' | 'backtest' | null
const [marketStatus, setMarketStatus] = useState(null); // { status, status_text, ... }
const [virtualTime, setVirtualTime] = useState(null); // Virtual time from server (for mock mode)
const [dataSources, setDataSources] = useState(null);
const clientRef = useRef(null);
const containerRef = useRef(null);
@@ -290,6 +291,9 @@ export default function LiveTradingApp() {
if (state.server_mode) {
setServerMode(state.server_mode);
}
if (state.data_sources) {
setDataSources(state.data_sources);
}
// 检查是否是mock模式
const isMockMode = state.is_mock_mode === true;
if (state.market_status) {
@@ -365,6 +369,12 @@ export default function LiveTradingApp() {
}
},
data_sources_update: (e) => {
if (e.data_sources) {
setDataSources(e.data_sources);
}
},
// Real-time price updates
price_update: (e) => {
try {
@@ -864,6 +874,14 @@ export default function LiveTradingApp() {
</span>
</>
)}
{dataSources?.last_success?.prices && (
<>
<span className="status-sep">·</span>
<span className="market-text backtest">
DATA {String(dataSources.last_success.prices).toUpperCase()}
</span>
</>
)}
<span className="status-sep">·</span>
<span className="time-text">{lastUpdate.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false })}</span>
</div>

View File

@@ -3,7 +3,7 @@ import Header from './Header.jsx';
export default function AboutModal({ onClose }) {
const [isClosing, setIsClosing] = useState(false);
const [language, setLanguage] = useState('en'); // 'en' or 'zh'
const [language] = useState('zh');
const handleClose = () => {
setIsClosing(true);
@@ -188,79 +188,14 @@ export default function AboutModal({ onClose }) {
{/* Content */}
<div style={contentStyle} onClick={(e) => e.stopPropagation()}>
{/* Language Switch */}
<div style={languageSwitchStyle}>
<span
style={getLangStyle(language === 'zh')}
onClick={() => setLanguage('zh')}
style={getLangStyle(true)}
>
中文
</span>
<span style={{ padding: '0 4px', color: '#999' }}></span>
<span
style={getLangStyle(language === 'en')}
onClick={() => setLanguage('en')}
>
EN
</span>
</div>
{language === 'en' ? (
// English Content
<>
<div style={{ marginBottom: '40px', fontSize: '15px', fontWeight: 600 }}>
{content.en.question}
<span style={highlight}>{content.en.questionHighlight}</span>
{content.en.questionEnd}
</div>
<div style={{ marginBottom: '30px' }}>
{content.en.intro}
<span style={highlight}>{content.en.introHighlight1}</span>
{content.en.introContinue}
<span style={highlight}>{content.en.introHighlight2}</span>
{content.en.introContinue2}
</div>
<div style={{ marginBottom: '25px' }}>
<span style={highlight}>{content.en.point1Highlight}</span>
{content.en.point1}
</div>
<div style={{ marginBottom: '25px' }}>
<span style={highlight}>{content.en.point2Highlight}</span>
{content.en.point2}
</div>
<div style={{ marginBottom: '40px' }}>
<span style={highlight}>{content.en.point3Highlight}</span>
{content.en.point3}
</div>
<div style={{ marginBottom: '25px', opacity: 0.7 }}>
Everything is fully open-source. Built on{' '}
<a
href="https://github.com/agentscope-ai"
target="_blank"
rel="noopener noreferrer"
style={linkStyle}
>
AgentScope
</a>
, using{' '}
<a
href="https://github.com/agentscope-ai/ReMe"
target="_blank"
rel="noopener noreferrer"
style={linkStyle}
>
ReMe
</a>
{' '}for memory management.
</div>
</>
) : (
// Chinese Content
<>
<div style={{ marginBottom: '30px' }}>
@@ -309,7 +244,7 @@ export default function AboutModal({ onClose }) {
</div>
<div style={{ marginBottom: '10px', opacity: 0.7 }}>
我们已经在github上开源
我们已经在 GitHub 上开源
</div>
<div style={{ marginBottom: '25px', opacity: 0.7 }}>
EvoTraders 基于{' '}
@@ -337,7 +272,6 @@ export default function AboutModal({ onClose }) {
你可以在此找到完整项目与示例
</div>
</>
)}
<div style={{ marginTop: '40px' }}>
<a
@@ -351,11 +285,10 @@ export default function AboutModal({ onClose }) {
</div>
<div style={closeHintStyle} onClick={handleClose}>
{language === 'en' ? 'Click here to close' : '点击此处关闭'}
点击此处关闭
</div>
</div>
</div>
</>
);
}

View File

@@ -124,7 +124,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
</div>
{rankMedal && !isPortfolioManager && (
<div style={{ fontSize: 18 }}>
{rankMedal.emoji} Rank #{agent.rank}
{rankMedal.emoji} {agent.rank}
</div>
)}
</div>
@@ -188,7 +188,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
position: 'relative',
cursor: 'help'
}}
title={`Model: ${agent.modelName}\nProvider: ${modelInfo.provider}`}>
title={`模型:${agent.modelName}\n提供方:${modelInfo.provider}`}>
<div style={{
fontSize: 10,
fontWeight: 700,
@@ -272,7 +272,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
lineHeight: 1,
marginBottom: 2
}}>
{overallWinRate != null ? `${(overallWinRate * 100).toFixed(1)}%` : 'N/A'}
{overallWinRate != null ? `${(overallWinRate * 100).toFixed(1)}%` : '暂无'}
</div>
<div style={{
fontSize: 9,
@@ -318,7 +318,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
marginBottom: 2,
lineHeight: 1
}}>
{bullWinRate != null ? `${(bullWinRate * 100).toFixed(1)}%` : 'N/A'}
{bullWinRate != null ? `${(bullWinRate * 100).toFixed(1)}%` : '暂无'}
</div>
<div style={{
fontSize: 9,
@@ -355,7 +355,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
marginBottom: 2,
lineHeight: 1
}}>
{bearWinRate != null ? `${(bearWinRate * 100).toFixed(1)}%` : 'N/A'}
{bearWinRate != null ? `${(bearWinRate * 100).toFixed(1)}%` : '暂无'}
</div>
<div style={{
fontSize: 9,
@@ -439,7 +439,7 @@ export default function AgentCard({ agent, onClose, isClosing }) {
fontSize: 8,
color: '#555555'
}}>
{signal.date?.substring(5, 10) || 'N/A'}
{signal.date?.substring(5, 10) || '暂无'}
</div>
<div style={{
fontSize: resultFontSize,
@@ -514,4 +514,3 @@ export default function AgentCard({ agent, onClose, isClosing }) {
</div>
);
}

View File

@@ -525,7 +525,7 @@ function MessageItem({ message, itemId, isHighlighted, getAgentModelInfo }) {
const colors = message.agent === 'Memory' ? MESSAGE_COLORS.memory :
getAgentColors(message.agentId, message.agent);
const title = message.agent === 'Memory' ? '记忆' : message.agent || 'AGENT';
const title = message.agent === 'Memory' ? '记忆' : message.agent || '智能体';
const agentModelData = message.agentId && getAgentModelInfo ?
getAgentModelInfo(message.agentId) :

View File

@@ -13,24 +13,24 @@ export default function PerformanceView({ leaderboard }) {
{/* Agent Performance Section */}
<div className="section">
<div className="section-header">
<h2 className="section-title">Agent Performance - Signal Accuracy</h2>
<h2 className="section-title">分析师表现 - 信号准确率</h2>
</div>
{rankedAgents.length === 0 ? (
<div className="empty-state">No leaderboard data available</div>
<div className="empty-state">暂无排行榜数据</div>
) : (
<div className="table-wrapper">
<table className="data-table">
<thead>
<tr>
<th>Rank</th>
<th>Agent</th>
<th>Win Rate</th>
<th>Bull Signals</th>
<th>Bull Win Rate</th>
<th>Bear Signals</th>
<th>Bear Win Rate</th>
<th>Total Signals</th>
<th>排名</th>
<th>分析师</th>
<th>胜率</th>
<th>看涨信号</th>
<th>看涨胜率</th>
<th>看跌信号</th>
<th>看跌胜率</th>
<th>总信号数</th>
</tr>
</thead>
<tbody>
@@ -66,27 +66,27 @@ export default function PerformanceView({ leaderboard }) {
<div style={{ fontSize: 10, color: '#666666' }}>{agent.role}</div>
</td>
<td style={{ fontWeight: 700, color: overallColor }}>
{overallWinRate != null ? `${(overallWinRate * 100).toFixed(1)}%` : 'N/A'}
{overallWinRate != null ? `${(overallWinRate * 100).toFixed(1)}%` : '暂无'}
</td>
<td>
<div style={{ fontSize: 12 }}>{bullTotal} signals</div>
<div style={{ fontSize: 10, color: '#666666' }}>{bullWins} wins</div>
<div style={{ fontSize: 12 }}>{bullTotal} 个信号</div>
<div style={{ fontSize: 10, color: '#666666' }}>{bullWins} 次命中</div>
{bullUnknown > 0 && (
<div style={{ fontSize: 10, color: '#999999' }}>{bullUnknown} unknown</div>
<div style={{ fontSize: 10, color: '#999999' }}>{bullUnknown} 条未判定</div>
)}
</td>
<td style={{ color: bullWinRate != null ? (bullWinRate >= 0.5 ? '#00C853' : '#999999') : '#999999' }}>
{bullWinRate != null ? `${(bullWinRate * 100).toFixed(1)}%` : 'N/A'}
{bullWinRate != null ? `${(bullWinRate * 100).toFixed(1)}%` : '暂无'}
</td>
<td>
<div style={{ fontSize: 12 }}>{bearTotal} signals</div>
<div style={{ fontSize: 10, color: '#666666' }}>{bearWins} wins</div>
<div style={{ fontSize: 12 }}>{bearTotal} 个信号</div>
<div style={{ fontSize: 10, color: '#666666' }}>{bearWins} 次命中</div>
{bearUnknown > 0 && (
<div style={{ fontSize: 10, color: '#999999' }}>{bearUnknown} unknown</div>
<div style={{ fontSize: 10, color: '#999999' }}>{bearUnknown} 条未判定</div>
)}
</td>
<td style={{ color: bearWinRate != null ? (bearWinRate >= 0.5 ? '#00C853' : '#999999') : '#999999' }}>
{bearWinRate != null ? `${(bearWinRate * 100).toFixed(1)}%` : 'N/A'}
{bearWinRate != null ? `${(bearWinRate * 100).toFixed(1)}%` : '暂无'}
</td>
<td style={{ fontWeight: 700 }}>{totalSignals}</td>
</tr>
@@ -102,7 +102,7 @@ export default function PerformanceView({ leaderboard }) {
{rankedAgents.length > 0 && rankedAgents.some(agent => agent.signals && agent.signals.length > 0) && (
<div className="section" style={{ marginTop: 32 }}>
<div className="section-header">
<h2 className="section-title">Signal History</h2>
<h2 className="section-title">信号历史</h2>
</div>
<div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(400px, 1fr))', gap: 20 }}>
@@ -150,7 +150,7 @@ export default function PerformanceView({ leaderboard }) {
const hasRealReturn = typeof realReturnValue === 'number' && Number.isFinite(realReturnValue);
const realReturnDisplay = hasRealReturn
? `${realReturnValue >= 0 ? '+' : ''}${(realReturnValue * 100).toFixed(2)}%`
: 'Unknown';
: '未判定';
const realReturnColor = hasRealReturn
? (realReturnValue >= 0 ? '#00C853' : '#FF1744')
: '#999999';
@@ -189,7 +189,7 @@ export default function PerformanceView({ leaderboard }) {
color: isBull ? '#00C853' : isBear ? '#FF1744' : '#999999',
fontSize: 12
}}>
{isBull ? 'Bull' : isBear ? 'Bear' : 'Neutral'}
{isBull ? '看涨' : isBear ? '看跌' : '中性'}
</span>
{!isNeutral && (
<span style={{
@@ -222,7 +222,7 @@ export default function PerformanceView({ leaderboard }) {
color: '#666666',
textAlign: 'center'
}}>
Total: {sortedSignals.length} signals
{sortedSignals.length} 条信号
</div>
</div>
);
@@ -233,4 +233,3 @@ export default function PerformanceView({ leaderboard }) {
</div>
);
}

View File

@@ -2,7 +2,7 @@ import React, { useState, useEffect, useRef } from 'react';
import { LLM_MODEL_LOGOS } from '../config/constants';
export default function RulesView() {
const [language, setLanguage] = useState('en'); // 'en' or 'zh'
const [language] = useState('zh');
const [scale, setScale] = useState(1);
const containerRef = useRef(null);
const contentRef = useRef(null);
@@ -197,19 +197,19 @@ export default function RulesView() {
section2Title: "Agent 决策机制",
tradingProcess: "交易流程",
tradingDesc: "Agents 进行日频交易并持续跟踪组合净值。每天最终交易决策前,agents 经历三个关键阶段:",
tradingDesc: "智能体以日频进行交易并持续跟踪组合净值。每天最终交易决策前,经历三个关键阶段:",
analysisPhase: "• 分析阶段",
analysisDesc: "所有 agents 根据各自的工具和信息独立分析并形成判断。",
analysisDesc: "所有智能体根据各自的工具和信息独立分析并形成判断。",
communicationPhase: "• 沟通阶段",
commIntro: "提供了多种沟通渠道1v1 私聊 / 1vN 通知 / NvN 会议",
decisionPhase: "• 决策阶段",
decisionDesc: "由 portfolio manager 汇总所有信息,并给出最终的团队交易。analysts 给出的原始交易信号仅个人维度排名。",
decisionDesc: "由投资经理汇总所有信息,并给出最终的团队交易决策。分析师给出的原始交易信号仅用于个人维度排名。",
reflectionTitle: "学习与进化",
reflectionDesc: "Agents 根据当日实际收益反思决策、总结经验,并存入 ",
reflectionDesc: "智能体根据当日实际收益反思决策、总结经验,并存入 ",
remeLink: "ReMe",
reflectionDesc2: " 记忆框架以持续改进。",
@@ -219,14 +219,14 @@ export default function RulesView() {
chartDesc: "追踪组合收益曲线 vs. 基准策略(等权、市值加权、动量)。用于评估整体策略有效性。",
rankingTitle: "• 分析师排名",
rankingDesc: "在 Trading Room 点击头像查看分析师表现(胜率、牛/熊市胜率)。用于了解哪些分析师提供最有价值的洞察。",
rankingDesc: "在交易室点击头像查看分析师表现(胜率、牛/熊市胜率),用来了解哪些分析师提供最有价值的洞察。",
statsTitle: "• 统计数据",
statsDesc: "详细的持仓和交易历史。用于深入分析仓位管理和执行质量。",
callToAction: "在 ",
callToAction: "在 ",
repoLink: "GitHub",
callToAction2: " 上 fork 并自定义!"
callToAction2: " 上 Fork 并自行定制。"
}
};
@@ -234,136 +234,14 @@ export default function RulesView() {
<div ref={containerRef} style={containerStyle}>
<div ref={contentRef} style={contentWrapperStyle}>
<div style={innerContentStyle}>
{/* Language Switch */}
<div style={languageSwitchStyle}>
<span
style={getLangStyle(language === 'zh')}
onClick={() => setLanguage('zh')}
style={getLangStyle(true)}
>
中文
</span>
<span style={{ padding: '0 4px', color: '#999' }}></span>
<span
style={getLangStyle(language === 'en')}
onClick={() => setLanguage('en')}
>
EN
</span>
</div>
{language === 'en' ? (
// English Content
<>
{/* Section 1: Agent Setup */}
<div style={sectionTitleStyle}>{content.en.section1Title}</div>
{/* Roles */}
<div style={{ marginBottom: '8px', fontSize: '12px' }}>
<div style={{ marginBottom: '3px' }}>
<span style={{ fontWeight: 600 }}>{content.en.pmRole}:</span> {content.en.pmDesc}
</div>
<div style={{ marginBottom: '3px' }}>
<span style={{ fontWeight: 600 }}>{content.en.rmRole}:</span> {content.en.rmDesc}
</div>
<div style={{ marginBottom: '3px' }}>
<span style={{ fontWeight: 600 }}>{content.en.analystsRole}:</span> {content.en.analystsDesc}
</div>
</div>
{/* Analysts with AI Models */}
<div style={{ marginLeft: '10px', marginBottom: '8px', display: 'grid', gridTemplateColumns: '1fr 1fr', gap: '3px 14px', fontSize: '11px' }}>
{content.en.analysts.map(analyst => {
const logo = llmLogos.find(l => l.name === analyst.modelKey);
return (
<div key={analyst.name} style={{
display: 'flex',
alignItems: 'center',
gap: '8px'
}}>
{logo && (
<img
src={logo.url}
alt={logo.label}
style={{
height: '16px',
width: 'auto',
objectFit: 'contain'
}}
/>
)}
<span style={{ fontWeight: 600 }}>{analyst.name}</span>
<span style={{ color: '#666' }}>- {analyst.model}</span>
</div>
);
})}
</div>
<div style={{ marginBottom: '10px', fontSize: '11px', fontStyle: 'italic', opacity: 0.8 }}>
{content.en.callToAction}
<a
href="https://github.com/agentscope-ai/agentscope-samples"
target="_blank"
rel="noopener noreferrer"
style={linkStyle}
>
{content.en.repoLink}
</a>
{content.en.callToAction2}
</div>
{/* Section 2: Agent Decision Mechanism */}
<div style={sectionTitleStyle}>{content.en.section2Title}</div>
<div style={{ marginBottom: '6px' }}>
<div style={{ fontWeight: 600, marginBottom: '3px' }}>{content.en.tradingProcess}</div>
<div style={{ marginBottom: '6px', fontSize: '12px' }}>{content.en.tradingDesc}</div>
<div style={subsectionStyle}>
<div style={{ marginBottom: '4px', fontSize: '12px' }}>
<span style={highlight}>{content.en.analysisPhase.replace('• ', '')}:</span> {content.en.analysisDesc}
</div>
<div style={{ marginBottom: '4px', fontSize: '12px' }}>
<span style={highlight}>{content.en.communicationPhase.replace('• ', '')}:</span> {content.en.commIntro}
</div>
<div style={{ fontSize: '12px' }}>
<span style={highlight}>{content.en.decisionPhase.replace('• ', '')}:</span> {content.en.decisionDesc}
</div>
</div>
</div>
<div style={{ marginBottom: '10px' }}>
<div style={{ fontWeight: 600, marginBottom: '3px' }}>{content.en.reflectionTitle}</div>
<div style={{ fontSize: '12px' }}>
{content.en.reflectionDesc}
<a
href="https://github.com/agentscope-ai/ReMe"
target="_blank"
rel="noopener noreferrer"
style={linkStyle}
>
{content.en.remeLink}
</a>
{content.en.reflectionDesc2}
</div>
</div>
{/* Section 3: Performance Evaluation */}
<div style={sectionTitleStyle}>{content.en.section3Title}</div>
<div style={subsectionStyle}>
<div style={{ marginBottom: '3px', fontSize: '12px' }}>
<span style={{ fontWeight: 600 }}>{content.en.chartTitle.replace('• ', '')}:</span> {content.en.chartDesc}
</div>
<div style={{ marginBottom: '3px', fontSize: '12px' }}>
<span style={{ fontWeight: 600 }}>{content.en.rankingTitle.replace('• ', '')}:</span> {content.en.rankingDesc}
</div>
<div style={{ fontSize: '12px' }}>
<span style={{ fontWeight: 600 }}>{content.en.statsTitle.replace('• ', '')}:</span> {content.en.statsDesc}
</div>
</div>
</>
) : (
// Chinese Content
<>
{/* 第一部分Agent 设定 */}
@@ -475,7 +353,6 @@ export default function RulesView() {
</div>
</div>
</>
)}
</div>
</div>
</div>

View File

@@ -243,7 +243,7 @@ export default function StatisticsView({ trades, holdings, stats, baseline_vw, e
}}>
{pmWinRateData?.winRate != null
? `${(pmWinRateData.winRate * 100).toFixed(1)}%`
: 'N/A'}
: '暂无'}
</div>
{pmWinRateData && (
<div style={{

View File

@@ -132,7 +132,7 @@ export const AXIS_TICKS = 5;
// WebSocket configuration
export const WS_URL = import.meta.env.VITE_WS_URL || "ws://localhost:8765";
// Initial ticker symbols (MAG7 companies)
// Initial ticker symbols for the production watchlist
export const INITIAL_TICKERS = [
{ symbol: "AAPL", price: null, change: null },
{ symbol: "MSFT", price: null, change: null },
@@ -140,6 +140,10 @@ export const INITIAL_TICKERS = [
{ symbol: "AMZN", price: null, change: null },
{ symbol: "NVDA", price: null, change: null },
{ symbol: "META", price: null, change: null },
{ symbol: "TSLA", price: null, change: null }
{ symbol: "TSLA", price: null, change: null },
{ symbol: "AMD", price: null, change: null },
{ symbol: "NFLX", price: null, change: null },
{ symbol: "AVGO", price: null, change: null },
{ symbol: "PLTR", price: null, change: null },
{ symbol: "COIN", price: null, change: null }
];

View File

@@ -51,7 +51,7 @@ export class ReadOnlyClient {
this.reconnectAttempts = 0;
this.reconnectDelay = this.baseReconnectDelay;
this.lastPongTime = Date.now();
this._safeEmit({ type: "system", content: "Connected to live server" });
this._safeEmit({ type: "system", content: "已连接实时数据服务" });
console.log("WebSocket connected");
this._startHeartbeat();
};
@@ -79,7 +79,7 @@ export class ReadOnlyClient {
};
this.ws.onclose = (event) => {
const code = event.code || "Unknown";
const code = event.code || "未知";
console.log(`[WebSocket] Connection closed: Code=${code}, WasClean=${event.wasClean}`);
this._stopHeartbeat();
@@ -96,7 +96,7 @@ export class ReadOnlyClient {
this._safeEmit({
type: "system",
content: "Try to connect to data server..."
content: "正在尝试连接数据服务..."
});
if (this.reconnectTimer) {
@@ -189,4 +189,3 @@ export class ReadOnlyClient {
this.ws = null;
}
}

View File

@@ -18,8 +18,8 @@ export function getModelIcon(modelName, modelProvider) {
logoPath: null,
color: "#666666",
bgColor: "#f5f5f5",
label: "Default",
provider: "Default"
label: "默认",
provider: "默认"
};
}
@@ -192,7 +192,7 @@ export function getModelIcon(modelName, modelProvider) {
color: "#666666",
bgColor: "#f5f5f5",
label: modelName.substring(0, 15),
provider: provider || "Unknown"
provider: provider || "未知"
};
}
@@ -203,7 +203,7 @@ export function getModelIcon(modelName, modelProvider) {
*/
export function getShortModelName(modelName) {
if (!modelName) {
return "N/A";
return "暂无";
}
const name = modelName.toLowerCase();
@@ -393,4 +393,3 @@ export function getShortModelName(modelName) {
// Return formatted original name
return capitalizeWords(modelName);
}

15
scripts/run_prod.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
set -euo pipefail
cd /root/code/evotraders
export PYTHONPATH=/root/code/evotraders/.pydeps:.
export TICKERS="${TICKERS:-AAPL,MSFT,GOOGL,AMZN,NVDA,META,TSLA,AMD,NFLX,AVGO,PLTR,COIN}"
exec python3 -m backend.main \
--mode live \
--config-name production \
--host 127.0.0.1 \
--port 8765 \
--trigger-time now \
--poll-interval 15