Initial commit of integrated agent system
This commit is contained in:
0
backend/tools/__init__.py
Normal file
0
backend/tools/__init__.py
Normal file
1138
backend/tools/analysis_tools.py
Normal file
1138
backend/tools/analysis_tools.py
Normal file
File diff suppressed because it is too large
Load Diff
459
backend/tools/data_tools.py
Normal file
459
backend/tools/data_tools.py
Normal file
@@ -0,0 +1,459 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# flake8: noqa: E501
|
||||
# pylint: disable=C0301
|
||||
"""Data fetching tools backed by the unified provider router."""
|
||||
import datetime
|
||||
import os
|
||||
|
||||
import httpx
|
||||
import pandas as pd
|
||||
import pandas_market_calendars as mcal
|
||||
from backend.data.provider_utils import normalize_symbol
|
||||
|
||||
from backend.data.cache import get_cache
|
||||
from backend.data.provider_router import get_provider_router
|
||||
from shared.schema import (
|
||||
CompanyNews,
|
||||
FinancialMetrics,
|
||||
InsiderTrade,
|
||||
LineItem,
|
||||
Price,
|
||||
)
|
||||
from backend.utils.settlement import logger
|
||||
|
||||
# Global cache instance
|
||||
_cache = get_cache()
|
||||
_router = get_provider_router()
|
||||
|
||||
|
||||
def _service_name() -> str:
|
||||
return str(os.getenv("SERVICE_NAME", "")).strip().lower()
|
||||
|
||||
|
||||
def _trading_service_url() -> str | None:
    """Return the trading-service base URL, or None when unset or self-referential."""
    url = str(os.getenv("TRADING_SERVICE_URL", "")).strip().rstrip("/")
    if not url:
        return None
    # Never proxy back to ourselves when running inside the trading service.
    if _service_name() == "trading_service":
        return None
    return url
|
||||
|
||||
|
||||
def _news_service_url() -> str | None:
    """Return the news-service base URL, or None when unset or self-referential."""
    url = str(os.getenv("NEWS_SERVICE_URL", "")).strip().rstrip("/")
    if not url:
        return None
    # Never proxy back to ourselves when running inside the news service.
    if _service_name() == "news_service":
        return None
    return url
|
||||
|
||||
|
||||
def _service_get_json(base_url: str, path: str, *, params: dict[str, object]) -> dict:
    """GET ``path`` from ``base_url`` and return the decoded JSON body.

    Raises:
        httpx.HTTPStatusError: when the response status is 4xx/5xx.
        httpx.HTTPError: on connection/timeout failures (30s timeout).
    """
    client = httpx.Client(base_url=base_url, timeout=30.0)
    with client:
        response = client.get(path, params=params)
        response.raise_for_status()
        return response.json()
|
||||
|
||||
|
||||
def get_last_tradeday(date: str) -> str:
    """
    Get the previous trading day for the specified date.

    Args:
        date: Date string (YYYY-MM-DD)

    Returns:
        Previous trading day date string (YYYY-MM-DD). Falls back to the
        previous calendar day when no calendar data is available.
    """
    current_date = datetime.datetime.strptime(date, "%Y-%m-%d")
    nyse_calendar = mcal.get_calendar("NYSE")

    if nyse_calendar is not None:
        # Look back 90 days so the window always contains several sessions.
        start_search = current_date - datetime.timedelta(days=90)

        if hasattr(nyse_calendar, "valid_days"):
            # pandas_market_calendars API
            trading_dates = nyse_calendar.valid_days(
                start_date=start_search.strftime("%Y-%m-%d"),
                end_date=current_date.strftime("%Y-%m-%d"),
            )
        else:
            # exchange_calendars API
            trading_dates = nyse_calendar.sessions_in_range(
                start_search.strftime("%Y-%m-%d"),
                current_date.strftime("%Y-%m-%d"),
            )

        # Normalize to YYYY-MM-DD strings for comparison.
        trading_dates_list = [
            pd.Timestamp(d).strftime("%Y-%m-%d") for d in trading_dates
        ]

        if date in trading_dates_list:
            idx = trading_dates_list.index(date)
            if idx > 0:
                # Current date is a session: return the session before it.
                return trading_dates_list[idx - 1]
            # First session in the window: recurse from the prior calendar day.
            prev_date = current_date - datetime.timedelta(days=1)
            return get_last_tradeday(prev_date.strftime("%Y-%m-%d"))
        if trading_dates_list:
            # Not a session: return the most recent session before it.
            return trading_dates_list[-1]

    # Fallback when no calendar data is available: previous calendar day.
    # BUGFIX: the original referenced an unbound ``prev_date`` here, raising
    # NameError whenever this path was reached (calendar missing or empty
    # session window).
    return (current_date - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
|
||||
|
||||
|
||||
def get_prices(
    ticker: str,
    start_date: str,
    end_date: str,
) -> list[Price]:
    """
    Fetch price data from cache or API.

    Lookup order: per-source cache, then the remote trading service (when
    configured and this process is not itself the trading service), then
    the unified provider router.

    Uses centralized data source configuration (FINNHUB_API_KEY prioritized).

    Args:
        ticker: Stock ticker symbol
        start_date: Start date (YYYY-MM-DD)
        end_date: End date (YYYY-MM-DD)

    Returns:
        list[Price]: List of Price objects (empty on failure or no data)
    """
    ticker = normalize_symbol(ticker)
    if not ticker:
        return []
    # 1) Cache: first hit across any configured price source wins.
    cached_sources = _router.price_sources()
    for source in cached_sources:
        cache_key = f"{ticker}_{start_date}_{end_date}_{source}"
        if cached_data := _cache.get_prices(cache_key):
            return [Price(**price) for price in cached_data]

    # 2) Remote trading service (best effort; failures fall through).
    service_url = _trading_service_url()
    if service_url:
        try:
            payload = _service_get_json(
                service_url,
                "/api/prices",
                params={
                    "ticker": ticker,
                    "start_date": start_date,
                    "end_date": end_date,
                },
            )
            prices = [Price(**price) for price in payload.get("prices", [])]
            if prices:
                return prices
        except Exception as exc:
            logger.info("Trading service price lookup failed for %s: %s", ticker, exc)

    # 3) Provider router (direct API access).
    try:
        prices, data_source = _router.get_prices(ticker, start_date, end_date)
    except Exception as exc:
        logger.info("Price lookup failed for %s: %s", ticker, exc)
        return []

    if not prices:
        return []

    # Cache under the source that actually served the data.
    cache_key = f"{ticker}_{start_date}_{end_date}_{data_source}"
    _cache.set_prices(cache_key, [p.model_dump() for p in prices])
    return prices
|
||||
|
||||
|
||||
def get_financial_metrics(
    ticker: str,
    end_date: str,
    period: str = "ttm",
    limit: int = 10,
) -> list[FinancialMetrics]:
    """
    Fetch financial metrics from cache or API.

    Lookup order: per-source cache, then the remote trading service (when
    configured), then the unified provider router.

    Uses centralized data source configuration (FINNHUB_API_KEY prioritized).

    Args:
        ticker: Stock ticker symbol
        end_date: End date (YYYY-MM-DD)
        period: Period type (default: "ttm")
        limit: Number of records to fetch

    Returns:
        list[FinancialMetrics]: List of financial metrics (empty on failure)
    """
    ticker = normalize_symbol(ticker)
    if not ticker:
        return []
    # 1) Cache: first hit across any configured API source wins.
    for source in _router.api_sources():
        cache_key = f"{ticker}_{period}_{end_date}_{limit}_{source}"
        if cached_data := _cache.get_financial_metrics(cache_key):
            return [FinancialMetrics(**metric) for metric in cached_data]

    # 2) Remote trading service (best effort; failures fall through).
    service_url = _trading_service_url()
    if service_url:
        try:
            payload = _service_get_json(
                service_url,
                "/api/financials",
                params={
                    "ticker": ticker,
                    "end_date": end_date,
                    "period": period,
                    "limit": limit,
                },
            )
            metrics = [
                FinancialMetrics(**metric)
                for metric in payload.get("financial_metrics", [])
            ]
            if metrics:
                return metrics
        except Exception as exc:
            logger.info("Trading service financial lookup failed for %s: %s", ticker, exc)

    # 3) Provider router (direct API access).
    try:
        financial_metrics, data_source = _router.get_financial_metrics(
            ticker=ticker,
            end_date=end_date,
            period=period,
            limit=limit,
        )
    except Exception as exc:
        logger.info("Financial metrics lookup failed for %s: %s", ticker, exc)
        return []

    if not financial_metrics:
        return []

    # Cache under the source that actually served the data.
    cache_key = f"{ticker}_{period}_{end_date}_{limit}_{data_source}"
    _cache.set_financial_metrics(
        cache_key,
        [m.model_dump() for m in financial_metrics],
    )
    return financial_metrics
|
||||
|
||||
def search_line_items(
    ticker: str,
    line_items: list[str],
    end_date: str,
    period: str = "ttm",
    limit: int = 10,
) -> list[LineItem]:
    """
    Fetch line items from Financial Datasets API (only supported source).

    Args:
        ticker: Stock ticker symbol
        line_items: Line item names to search for
        end_date: End date (YYYY-MM-DD)
        period: Period type (default: "ttm")
        limit: Number of records to fetch

    Returns empty list on API errors to allow graceful degradation.
    """
    try:
        ticker = normalize_symbol(ticker)
        if not ticker:
            return []

        # Prefer the remote trading service when configured. NOTE: unlike the
        # other fetchers, a service failure here is caught by the outer try
        # and returns [] rather than falling through to the router.
        service_url = _trading_service_url()
        if service_url:
            payload = _service_get_json(
                service_url,
                "/api/line-items",
                params={
                    "ticker": ticker,
                    "line_items": line_items,
                    "end_date": end_date,
                    "period": period,
                    "limit": limit,
                },
            )
            return [LineItem(**item) for item in payload.get("search_results", [])]

        # Direct router lookup when no service is configured.
        return _router.search_line_items(
            ticker=ticker,
            line_items=line_items,
            end_date=end_date,
            period=period,
            limit=limit,
        )
    except Exception as e:
        logger.info(
            f"Warning: Exception while fetching line items for {ticker}: {str(e)}",
        )
        return []
|
||||
|
||||
def get_insider_trades(
    ticker: str,
    end_date: str,
    start_date: str | None = None,
    limit: int = 1000,
) -> list[InsiderTrade]:
    """Fetch insider trades from cache or API.

    Lookup order: per-source cache, then the remote trading service (when
    configured), then the unified provider router.

    Args:
        ticker: Stock ticker symbol
        end_date: End date (YYYY-MM-DD)
        start_date: Optional start date (YYYY-MM-DD)
        limit: Maximum number of trades to fetch

    Returns:
        list[InsiderTrade]: Insider trades (empty on failure or no data)
    """
    ticker = normalize_symbol(ticker)
    if not ticker:
        return []
    # 1) Cache: first hit across any configured API source wins.
    for source in _router.api_sources():
        cache_key = (
            f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
        )
        if cached_data := _cache.get_insider_trades(cache_key):
            return [InsiderTrade(**trade) for trade in cached_data]

    # 2) Remote trading service (best effort; failures fall through).
    service_url = _trading_service_url()
    if service_url:
        try:
            params = {"ticker": ticker, "end_date": end_date, "limit": limit}
            if start_date:
                params["start_date"] = start_date
            payload = _service_get_json(
                service_url,
                "/api/insider-trades",
                params=params,
            )
            trades = [
                InsiderTrade(**trade)
                for trade in payload.get("insider_trades", [])
            ]
            if trades:
                return trades
        except Exception as exc:
            logger.info("Trading service insider lookup failed for %s: %s", ticker, exc)

    # 3) Provider router (direct API access).
    try:
        all_trades, data_source = _router.get_insider_trades(
            ticker=ticker,
            end_date=end_date,
            start_date=start_date,
            limit=limit,
        )
    except Exception as exc:
        logger.info("Insider trades lookup failed for %s: %s", ticker, exc)
        return []

    if not all_trades:
        return []

    # Cache under the source that actually served the data.
    cache_key = f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
    _cache.set_insider_trades(cache_key, [trade.model_dump() for trade in all_trades])
    return all_trades
|
||||
|
||||
def get_company_news(
    ticker: str,
    end_date: str,
    start_date: str | None = None,
    limit: int = 1000,
) -> list[CompanyNews]:
    """Fetch company news from cache or API.

    Lookup order: per-source cache, the remote trading service, the news
    service (enriched news), then the unified provider router.

    Args:
        ticker: Stock ticker symbol
        end_date: End date (YYYY-MM-DD)
        start_date: Optional start date (YYYY-MM-DD)
        limit: Maximum number of news items to fetch

    Returns:
        list[CompanyNews]: Company news items (empty on failure or no data)
    """
    ticker = normalize_symbol(ticker)
    if not ticker:
        return []
    # 1) Cache: first hit across any configured API source wins.
    for source in _router.api_sources():
        cache_key = (
            f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{source}"
        )
        if cached_data := _cache.get_company_news(cache_key):
            return [CompanyNews(**news) for news in cached_data]

    # 2) Remote trading service (best effort; failures fall through).
    trading_service_url = _trading_service_url()
    if trading_service_url:
        try:
            params = {"ticker": ticker, "end_date": end_date, "limit": limit}
            if start_date:
                params["start_date"] = start_date
            payload = _service_get_json(
                trading_service_url,
                "/api/news",
                params=params,
            )
            news = [CompanyNews(**item) for item in payload.get("news", [])]
            if news:
                return news
        except Exception as exc:
            logger.info("Trading service news lookup failed for %s: %s", ticker, exc)

    # 3) Dedicated news service for enriched items (best effort).
    news_service_url = _news_service_url()
    if news_service_url:
        try:
            params = {"ticker": ticker, "end_date": end_date, "limit": limit}
            if start_date:
                params["start_date"] = start_date
            payload = _service_get_json(
                news_service_url,
                "/api/enriched-news",
                params=params,
            )
            news = [CompanyNews(**item) for item in payload.get("news", [])]
            if news:
                return news
        except Exception as exc:
            logger.info("News service lookup failed for %s: %s", ticker, exc)

    # 4) Provider router (direct API access).
    try:
        all_news, data_source = _router.get_company_news(
            ticker=ticker,
            end_date=end_date,
            start_date=start_date,
            limit=limit,
        )
    except Exception as exc:
        logger.info("Company news lookup failed for %s: %s", ticker, exc)
        return []

    if not all_news:
        return []

    # Cache under the source that actually served the data.
    cache_key = f"{ticker}_{start_date or 'none'}_{end_date}_{limit}_{data_source}"
    _cache.set_company_news(cache_key, [news.model_dump() for news in all_news])
    return all_news
|
||||
|
||||
def get_market_cap(ticker: str, end_date: str) -> float | None:
    """Fetch market cap from the API. Finnhub values are converted from millions.

    Lookup order: remote trading service (when configured), then the provider
    router, which is handed a cache-aware financial-metrics fallback.

    Args:
        ticker: Stock ticker symbol
        end_date: End date (YYYY-MM-DD)

    Returns:
        float | None: Market cap, or None when unavailable.
    """
    ticker = normalize_symbol(ticker)
    if not ticker:
        return None

    # Remote trading service first (best effort; failures fall through).
    service_url = _trading_service_url()
    if service_url:
        try:
            payload = _service_get_json(
                service_url,
                "/api/market-cap",
                params={"ticker": ticker, "end_date": end_date},
            )
            value = payload.get("market_cap")
            # A null value from a successful service call is returned as None
            # without falling through to the router.
            return float(value) if value is not None else None
        except Exception as exc:
            logger.info("Trading service market-cap lookup failed for %s: %s", ticker, exc)

    def _metrics_lookup(symbol: str, date: str):
        # Cache-first financial-metrics lookup handed to the router so it can
        # derive market cap from metrics when no direct quote is available.
        for source in _router.api_sources():
            cache_key = f"{symbol}_ttm_{date}_10_{source}"
            if cached_data := _cache.get_financial_metrics(cache_key):
                return [FinancialMetrics(**metric) for metric in cached_data], source
        return _router.get_financial_metrics(
            ticker=symbol,
            end_date=date,
            period="ttm",
            limit=10,
        )

    try:
        market_cap, _ = _router.get_market_cap(
            ticker=ticker,
            end_date=end_date,
            metrics_lookup=_metrics_lookup,
        )
    except Exception as exc:
        logger.info("Market cap lookup failed for %s: %s", ticker, exc)
        return None
    return market_cap
|
||||
|
||||
|
||||
def prices_to_df(prices: "list[Price]") -> pd.DataFrame:
    """Convert Price objects to a date-indexed OHLCV DataFrame.

    Args:
        prices: Price records exposing ``model_dump()`` with a ``time`` field
            plus open/close/high/low/volume values.

    Returns:
        pd.DataFrame: sorted ascending by date, indexed by ``Date``, with
        numeric OHLCV columns (unparseable values become NaN). An empty
        DataFrame with the OHLCV columns when ``prices`` is empty.
    """
    numeric_cols = ["open", "close", "high", "low", "volume"]
    if not prices:
        # BUGFIX: the original raised KeyError("time") on an empty list.
        empty = pd.DataFrame(columns=numeric_cols)
        empty.index.name = "Date"
        return empty
    df = pd.DataFrame([p.model_dump() for p in prices])
    df["Date"] = pd.to_datetime(df["time"])
    df.set_index("Date", inplace=True)
    for col in numeric_cols:
        df[col] = pd.to_numeric(df[col], errors="coerce")
    df.sort_index(inplace=True)
    return df
|
||||
218
backend/tools/risk_tools.py
Normal file
218
backend/tools/risk_tools.py
Normal file
@@ -0,0 +1,218 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Risk management tools for the risk manager agent."""
|
||||
|
||||
import json
|
||||
from typing import Any, Dict, Iterable, List
|
||||
|
||||
from agentscope.message import TextBlock
|
||||
from agentscope.tool import ToolResponse
|
||||
|
||||
|
||||
def _to_text_response(text: str) -> ToolResponse:
    """Wrap plain text in a single-TextBlock ToolResponse for agent tool output."""
    return ToolResponse(content=[TextBlock(type="text", text=text)])
|
||||
|
||||
|
||||
def _parse_object(payload: Any) -> Dict[str, Any]:
|
||||
if payload is None:
|
||||
return {}
|
||||
if isinstance(payload, dict):
|
||||
return payload
|
||||
if isinstance(payload, str):
|
||||
try:
|
||||
parsed = json.loads(payload)
|
||||
return parsed if isinstance(parsed, dict) else {}
|
||||
except json.JSONDecodeError:
|
||||
return {}
|
||||
return {}
|
||||
|
||||
|
||||
def _parse_prices(payload: Any) -> Dict[str, float]:
    """Coerce a price map (dict or JSON string) into {ticker: float}, dropping bad values."""
    prices: Dict[str, float] = {}
    for ticker, value in _parse_object(payload).items():
        try:
            price = float(value)
        except (TypeError, ValueError):
            continue
        prices[str(ticker)] = price
    return prices
|
||||
|
||||
|
||||
def _iter_positions(
|
||||
portfolio: Dict[str, Any],
|
||||
prices: Dict[str, float],
|
||||
) -> Iterable[Dict[str, Any]]:
|
||||
positions = portfolio.get("positions", {})
|
||||
for ticker, raw_position in positions.items():
|
||||
if not isinstance(raw_position, dict):
|
||||
continue
|
||||
price = prices.get(ticker, 0.0)
|
||||
long_qty = int(raw_position.get("long", 0) or 0)
|
||||
short_qty = int(raw_position.get("short", 0) or 0)
|
||||
long_value = long_qty * price
|
||||
short_value = short_qty * price
|
||||
net_value = long_value - short_value
|
||||
gross_value = long_value + short_value
|
||||
yield {
|
||||
"ticker": ticker,
|
||||
"price": price,
|
||||
"long_qty": long_qty,
|
||||
"short_qty": short_qty,
|
||||
"long_value": long_value,
|
||||
"short_value": short_value,
|
||||
"net_value": net_value,
|
||||
"gross_value": gross_value,
|
||||
}
|
||||
|
||||
|
||||
def _portfolio_equity(portfolio: Dict[str, Any], prices: Dict[str, float]) -> float:
    """Return portfolio equity: cash + margin used + sum of net position values."""
    cash = float(portfolio.get("cash", 0.0) or 0.0)
    margin_used = float(portfolio.get("margin_used", 0.0) or 0.0)
    net_positions = sum(
        position["net_value"]
        for position in _iter_positions(portfolio, prices)
    )
    return cash + margin_used + net_positions
|
||||
|
||||
|
||||
def assess_position_concentration(
    portfolio: Dict[str, Any] | str,
    current_prices: Dict[str, float] | str,
) -> ToolResponse:
    """
    Assess single-name concentration and gross exposure in the current portfolio.

    Args:
        portfolio: Portfolio state with cash, positions, and margin fields.
        current_prices: Current price map by ticker.

    Returns:
        ToolResponse: human-readable concentration report ending in a risk flag.
    """
    portfolio_obj = _parse_object(portfolio)
    prices = _parse_prices(current_prices)
    equity = _portfolio_equity(portfolio_obj, prices)

    # All ratios below divide by equity, so bail out when it is <= 0.
    if equity <= 0:
        return _to_text_response("Unable to assess concentration: portfolio equity is non-positive.")

    # Largest absolute net exposure first.
    exposures: List[Dict[str, Any]] = sorted(
        _iter_positions(portfolio_obj, prices),
        key=lambda item: abs(item["net_value"]),
        reverse=True,
    )

    if not exposures:
        return _to_text_response(
            "No open positions. Concentration risk is low because the portfolio is fully in cash."
        )

    lines = ["=== Position Concentration Assessment ==="]
    gross_exposure = sum(item["gross_value"] for item in exposures)
    net_exposure = sum(item["net_value"] for item in exposures)
    lines.append(f"Portfolio equity: ${equity:,.2f}")
    lines.append(f"Gross exposure: ${gross_exposure:,.2f} ({gross_exposure / equity:.1%} of equity)")
    lines.append(f"Net exposure: ${net_exposure:,.2f} ({net_exposure / equity:.1%} of equity)")
    lines.append("Largest positions by net exposure:")

    # Report only the top five names.
    for item in exposures[:5]:
        weight = item["net_value"] / equity
        gross_weight = item["gross_value"] / equity
        direction = "NET LONG" if item["net_value"] >= 0 else "NET SHORT"
        lines.append(
            f"- {item['ticker']}: {direction}, net ${item['net_value']:,.2f} ({weight:.1%}), "
            f"gross ${item['gross_value']:,.2f} ({gross_weight:.1%})"
        )

    # Flag thresholds on the single largest name: >=30% HIGH, >=20% MODERATE.
    top_weight = abs(exposures[0]["net_value"]) / equity
    if top_weight >= 0.30:
        lines.append("Risk flag: concentration is HIGH because the largest single-name exposure exceeds 30% of equity.")
    elif top_weight >= 0.20:
        lines.append("Risk flag: concentration is MODERATE because the largest single-name exposure exceeds 20% of equity.")
    else:
        lines.append("Risk flag: concentration is currently contained at the single-name level.")

    return _to_text_response("\n".join(lines))
|
||||
|
||||
|
||||
def assess_margin_and_liquidity(
    portfolio: Dict[str, Any] | str,
    current_prices: Dict[str, float] | str,
) -> ToolResponse:
    """
    Assess available cash, margin usage, and short exposure pressure.

    Args:
        portfolio: Portfolio state with cash, positions, and margin fields.
        current_prices: Current price map by ticker.

    Returns:
        ToolResponse: human-readable margin/liquidity report ending in a risk flag.
    """
    portfolio_obj = _parse_object(portfolio)
    prices = _parse_prices(current_prices)
    equity = _portfolio_equity(portfolio_obj, prices)
    cash = float(portfolio_obj.get("cash", 0.0) or 0.0)
    margin_used = float(portfolio_obj.get("margin_used", 0.0) or 0.0)
    margin_requirement = float(portfolio_obj.get("margin_requirement", 0.0) or 0.0)

    short_exposure = sum(item["short_value"] for item in _iter_positions(portfolio_obj, prices))
    # Buffer is the cash left after covering what is already on margin.
    margin_buffer = cash - margin_used

    lines = ["=== Margin And Liquidity Assessment ==="]
    lines.append(f"Portfolio equity: ${equity:,.2f}")
    lines.append(f"Cash available: ${cash:,.2f}")
    lines.append(f"Margin used: ${margin_used:,.2f}")
    lines.append(f"Margin requirement: {margin_requirement:.1%}")
    lines.append(f"Short exposure: ${short_exposure:,.2f}")
    lines.append(f"Margin buffer (cash - used): ${margin_buffer:,.2f}")

    # Equity ratios only make sense for positive equity.
    if equity > 0:
        lines.append(f"Margin used / equity: {margin_used / equity:.1%}")
        lines.append(f"Short exposure / equity: {short_exposure / equity:.1%}")

    if margin_buffer < 0:
        lines.append("Risk flag: HIGH. Margin usage exceeds available cash buffer.")
    elif equity > 0 and margin_used / equity > 0.35:
        lines.append("Risk flag: MODERATE to HIGH. Margin usage is above 35% of equity.")
    else:
        lines.append("Risk flag: margin pressure is currently manageable.")

    return _to_text_response("\n".join(lines))
|
||||
|
||||
|
||||
def assess_volatility_exposure(
    tickers: List[str] | str,
    current_date: str | None = None,
) -> ToolResponse:
    """
    Assess per-ticker volatility and risk level for the current watchlist.

    Args:
        tickers: List of stock tickers or JSON list string.
        current_date: Analysis date in YYYY-MM-DD format.

    Returns:
        ToolResponse: one summary line per ticker with volatility, RSI,
        trend, and risk level.
    """
    # NOTE(review): imported inside the function body — presumably to avoid a
    # circular import between the tool modules; confirm before hoisting.
    from datetime import datetime, timedelta

    from backend.tools.analysis_tools import _parse_tickers, _resolved_date
    from backend.tools.data_tools import get_prices, prices_to_df
    from backend.tools.technical_signals import StockTechnicalAnalyzer

    tickers_list = _parse_tickers(tickers)
    current_date = _resolved_date(current_date)
    end_dt = datetime.strptime(current_date, "%Y-%m-%d")
    # 90 calendar days of history going into the analyzer.
    start_date = (end_dt - timedelta(days=90)).strftime("%Y-%m-%d")
    analyzer = StockTechnicalAnalyzer()
    lines = [f"=== Volatility Exposure Assessment ({current_date}) ==="]

    for ticker in tickers_list:
        prices = get_prices(
            ticker=ticker,
            start_date=start_date,
            end_date=current_date,
        )
        # The analyzer needs at least 5 bars to produce a signal.
        if not prices or len(prices) < 5:
            lines.append(f"- {ticker}: insufficient price data")
            continue
        signal = analyzer.analyze(ticker=ticker, df=prices_to_df(prices))
        lines.append(
            f"- {ticker}: annualized volatility {signal.annualized_volatility_pct:.1f}%, "
            f"RSI14 {signal.rsi14:.1f}, trend {signal.trend}, risk level {signal.risk_level}"
        )

    # Only the header present means no tickers were analyzed.
    if len(lines) == 1:
        lines.append("No tickers provided.")

    return _to_text_response("\n".join(lines))
|
||||
193
backend/tools/technical_signals.py
Normal file
193
backend/tools/technical_signals.py
Normal file
@@ -0,0 +1,193 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Structured technical signal analysis used by technical tools."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
@dataclass
class TechnicalSignal:
    """Structured technical analysis result for one ticker."""

    ticker: str
    # Latest close; 0.0 until populated by the analyzer.
    current_price: float = 0.0
    # Simple moving averages over the trailing N bars.
    ma5: float = 0.0
    ma10: float = 0.0
    ma20: float = 0.0
    ma50: float = 0.0
    # None when fewer than 200 bars are available.
    ma200: Optional[float] = None
    # Percent gap between current price and MA5.
    bias_ma5_pct: float = 0.0
    # Trailing percent returns over 5/10/20 bars.
    momentum_5d_pct: float = 0.0
    momentum_10d_pct: float = 0.0
    momentum_20d_pct: float = 0.0
    # 20-bar return volatility scaled by sqrt(252), in percent.
    annualized_volatility_pct: float = 0.0
    # RSI(14); defaults to the neutral midpoint.
    rsi14: float = 50.0
    macd: float = 0.0
    macd_signal: float = 0.0
    # Bollinger bands (20-period mean +/- 2 std).
    bollinger_upper: float = 0.0
    bollinger_mid: float = 0.0
    bollinger_lower: float = 0.0
    # Categorical labels assigned by the classifier helpers.
    trend: str = "NEUTRAL"
    mean_reversion_signal: str = "NEUTRAL"
    risk_level: str = "MODERATE RISK"
    # Free-form human-readable observations.
    notes: List[str] = field(default_factory=list)

    def to_summary(self) -> Dict[str, object]:
        """Compact dict for logs/tests."""
        return {
            "ticker": self.ticker,
            "trend": self.trend,
            "mean_reversion_signal": self.mean_reversion_signal,
            "risk_level": self.risk_level,
            "current_price": self.current_price,
            "rsi14": self.rsi14,
            "annualized_volatility_pct": self.annualized_volatility_pct,
        }
|
||||
|
||||
|
||||
class StockTechnicalAnalyzer:
    """Lightweight technical analyzer adapted for the "Big Era" (大时代) tools."""

    def analyze(self, ticker: str, df: pd.DataFrame) -> TechnicalSignal:
        """Analyze one ticker from OHLC price history.

        Args:
            ticker: Symbol the signal is reported for.
            df: Price history with at least ``time`` and ``close`` columns.

        Returns:
            TechnicalSignal: populated signal; a default (neutral) signal
            with an explanatory note when fewer than 5 rows are available.
        """
        result = TechnicalSignal(ticker=ticker)
        if df is None or df.empty or len(df) < 5:
            result.notes.append("Insufficient price data")
            return result

        # Work on a chronological copy; coerce close to numeric.
        frame = df.sort_values("time").reset_index(drop=True).copy()
        frame["close"] = pd.to_numeric(frame["close"], errors="coerce")
        frame["returns"] = frame["close"].pct_change()

        # Simple moving averages.
        for window in (5, 10, 20, 50, 200):
            frame[f"MA_{window}"] = frame["close"].rolling(window).mean()

        # MACD: 12/26 EMA difference with a 9-period signal line.
        frame["EMA_12"] = frame["close"].ewm(span=12, adjust=False).mean()
        frame["EMA_26"] = frame["close"].ewm(span=26, adjust=False).mean()
        frame["MACD"] = frame["EMA_12"] - frame["EMA_26"]
        frame["MACD_SIGNAL"] = (
            frame["MACD"].ewm(span=9, adjust=False).mean()
        )

        # RSI(14) using simple rolling means of gains/losses; zero average
        # loss is replaced with NA so the division does not blow up.
        delta = frame["close"].diff()
        gain = delta.where(delta > 0, 0.0)
        loss = -delta.where(delta < 0, 0.0)
        avg_gain = gain.rolling(14).mean()
        avg_loss = loss.rolling(14).mean()
        rs = avg_gain / avg_loss.replace(0, pd.NA)
        frame["RSI_14"] = 100 - (100 / (1 + rs))

        # Bollinger bands: 20-period mean +/- 2 standard deviations.
        frame["BB_MID"] = frame["close"].rolling(20).mean()
        frame["BB_STD"] = frame["close"].rolling(20).std()
        frame["BB_UPPER"] = frame["BB_MID"] + 2 * frame["BB_STD"]
        frame["BB_LOWER"] = frame["BB_MID"] - 2 * frame["BB_STD"]

        # Populate the signal from the latest bar; NaNs become safe defaults.
        latest = frame.iloc[-1]
        result.current_price = _safe_number(latest["close"])
        result.ma5 = _safe_number(latest["MA_5"])
        result.ma10 = _safe_number(latest["MA_10"])
        result.ma20 = _safe_number(latest["MA_20"])
        result.ma50 = _safe_number(latest["MA_50"])
        result.ma200 = _safe_optional(latest["MA_200"])
        result.bias_ma5_pct = _percent_gap(result.current_price, result.ma5)
        result.momentum_5d_pct = _lookback_return(frame["close"], 5)
        result.momentum_10d_pct = _lookback_return(frame["close"], 10)
        result.momentum_20d_pct = _lookback_return(frame["close"], 20)
        result.annualized_volatility_pct = _safe_number(
            frame["returns"].tail(20).std() * (252**0.5) * 100,
        )
        result.rsi14 = _safe_number(latest["RSI_14"], default=50.0)
        result.macd = _safe_number(latest["MACD"])
        result.macd_signal = _safe_number(latest["MACD_SIGNAL"])
        result.bollinger_mid = _safe_number(latest["BB_MID"])
        result.bollinger_upper = _safe_number(latest["BB_UPPER"])
        result.bollinger_lower = _safe_number(latest["BB_LOWER"])
        # Derived categorical labels.
        result.trend = _classify_trend(result)
        result.mean_reversion_signal = _classify_mean_reversion(result)
        result.risk_level = _classify_risk(result.annualized_volatility_pct)
        result.notes = _build_notes(result)
        return result
|
||||
|
||||
|
||||
def _safe_number(value, default: float = 0.0) -> float:
|
||||
try:
|
||||
if pd.isna(value):
|
||||
return default
|
||||
return float(value)
|
||||
except (TypeError, ValueError):
|
||||
return default
|
||||
|
||||
|
||||
def _safe_optional(value) -> Optional[float]:
|
||||
try:
|
||||
if pd.isna(value):
|
||||
return None
|
||||
return float(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _lookback_return(series: pd.Series, lookback: int) -> float:
    """Percent change from ``lookback`` bars ago to the latest bar (0.0 when unavailable)."""
    if len(series) <= lookback:
        return 0.0
    base = _safe_number(series.iloc[-lookback - 1])
    latest = _safe_number(series.iloc[-1])
    # A non-positive base makes the ratio meaningless.
    if base <= 0:
        return 0.0
    return (latest / base - 1) * 100
|
||||
|
||||
|
||||
def _percent_gap(value: float, anchor: float) -> float:
|
||||
if anchor <= 0:
|
||||
return 0.0
|
||||
return ((value - anchor) / anchor) * 100
|
||||
|
||||
|
||||
def _classify_trend(result: TechnicalSignal) -> str:
    """Classify trend from the moving-average stack and the MACD relationship."""
    # Price sitting on top of an ordered MA5 >= MA10 >= MA20 stack (all positive).
    stacked_up = (
        result.current_price >= result.ma5 >= result.ma10 >= result.ma20 > 0
    )
    if stacked_up:
        return "STRONG BULLISH" if result.macd >= result.macd_signal else "BULLISH"
    if result.current_price < result.ma20 and result.macd < result.macd_signal:
        return "BEARISH"
    return "NEUTRAL"
|
||||
|
||||
|
||||
def _classify_mean_reversion(result: TechnicalSignal) -> str:
    """Flag overbought/oversold via RSI thresholds or Bollinger band breaches."""
    below_lower_band = (
        result.bollinger_lower > 0
        and result.current_price <= result.bollinger_lower
    )
    if result.rsi14 <= 30 or below_lower_band:
        return "OVERSOLD"
    above_upper_band = (
        result.bollinger_upper > 0
        and result.current_price >= result.bollinger_upper
    )
    if result.rsi14 >= 70 or above_upper_band:
        return "OVERBOUGHT"
    return "NEUTRAL"
|
||||
|
||||
|
||||
def _classify_risk(volatility_pct: float) -> str:
|
||||
if volatility_pct > 50:
|
||||
return "HIGH RISK"
|
||||
if volatility_pct > 25:
|
||||
return "MODERATE RISK"
|
||||
return "LOW RISK"
|
||||
|
||||
|
||||
def _build_notes(result: TechnicalSignal) -> List[str]:
    """Collect short human-readable observations for the signal."""
    notes: List[str] = []
    if abs(result.bias_ma5_pct) > 5:
        notes.append("Price extended from MA5")
    if result.macd > result.macd_signal:
        notes.append("MACD supports upside momentum")
    # The mean-reversion label is a single value, so these are exclusive.
    reversion = result.mean_reversion_signal
    if reversion == "OVERSOLD":
        notes.append("Potential rebound setup")
    elif reversion == "OVERBOUGHT":
        notes.append("Potential pullback setup")
    return notes
|
||||
Reference in New Issue
Block a user