Compare commits

5 Commits: a9d863073f...feature/ag

| Author | SHA1 | Date |
|---|---|---|
| | dfc8fda187 | |
| | aae4bc7d40 | |
| | 11849208ed | |
| | 62c7341cf6 | |
| | 80ce63da5a | |
@@ -310,11 +310,12 @@ class EvoAgent(ToolGuardMixin, ReActAgent):
         )
         logger.debug("Registered workspace watch hook")

-    async def _reasoning(self, **kwargs) -> Msg:
+    async def _reasoning(self, tool_choice: Optional[str] = None, **kwargs) -> Msg:
         """Override reasoning to execute pre-reasoning hooks.

         Args:
-            **kwargs: Arguments for reasoning
+            tool_choice: Optional tool choice for structured output
+            **kwargs: Additional arguments for reasoning

         Returns:
             Response message
@@ -327,7 +328,7 @@ class EvoAgent(ToolGuardMixin, ReActAgent):
         )

         # Call parent (which may be ToolGuardMixin's _reasoning)
-        return await super()._reasoning(**kwargs)
+        return await super()._reasoning(tool_choice=tool_choice, **kwargs)

     def reload_runtime_assets(self, active_skill_dirs: Optional[List[Path]] = None) -> None:
         """Reload toolkit and system prompt from current run assets.
@@ -579,7 +580,7 @@ class EvoAgent(ToolGuardMixin, ReActAgent):
             return

         try:
-            self._messenger = AgentMessenger(agent_id=self.agent_id)
+            self._messenger = AgentMessenger()
             self._task_delegator = TaskDelegator(agent=self)
             logger.debug(
                 "Team infrastructure initialized for agent: %s",
@@ -12,7 +12,7 @@ from __future__ import annotations
 import asyncio
 import json
 import logging
-from datetime import UTC, datetime
+from datetime import datetime, timezone
 from enum import Enum

 from typing import Any, Callable, Dict, Iterable, List, Optional, Set
@@ -78,7 +78,7 @@ class ApprovalRecord:
         self.session_id = session_id
         self.status = ApprovalStatus.PENDING
         self.findings = findings or []
-        self.created_at = datetime.now(UTC)
+        self.created_at = datetime.now(timezone.utc)
         self.resolved_at: Optional[datetime] = None
         self.resolved_by: Optional[str] = None
         self.metadata: Dict[str, Any] = {}
@@ -163,7 +163,7 @@ class ToolGuardStore:
             return record

         record.status = status
-        record.resolved_at = datetime.now(UTC)
+        record.resolved_at = datetime.now(timezone.utc)
         record.resolved_by = resolved_by
         if notify_request and record.pending_request:
             if status == ApprovalStatus.APPROVED:
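A note on the `UTC` → `timezone.utc` changes in this and several later files: `from datetime import UTC` only works on Python 3.11+, while `timezone.utc` is available on every supported version. A minimal sketch of the difference:

```python
from datetime import datetime, timezone

# `from datetime import UTC` raises ImportError on Python < 3.11;
# `timezone.utc` has been available since Python 3.2.
now = datetime.now(timezone.utc)
print(now.isoformat())  # e.g. 2024-01-01T00:00:00+00:00

# On 3.11+ the two spellings name the same object:
# from datetime import UTC; assert UTC is timezone.utc
```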
@@ -312,12 +312,21 @@ class RunWorkspaceManager:
             "- 审阅分析以理解市场观点\n"
             "- 在做决策前先考虑风险警告\n"
             "- 评估当前投资组合持仓、现金与保证金占用\n"
+            "- 在做最终决策前,先判断当前团队是否足以覆盖任务;如果覆盖不足,不要勉强给结论,先扩编团队\n"
+            "- 当现有团队覆盖不足、观点分歧过大、或出现新的专业分析需求时,优先考虑动态创建合适的分析师,再继续讨论\n"
             "- 决策必须与整体投资目标和风险约束一致\n\n"
+            "动态扩编触发条件:\n"
+            "- 出现当前团队未覆盖的研究领域:期权、宏观、行业专项、事件驱动、监管冲击、加密资产、商品链、特殊市场结构\n"
+            "- 关键 ticker 的结论依赖某种专业知识,但现有 analyst 无法提供直接证据链\n"
+            "- 分析师之间存在明显冲突,且仅靠风险经理无法完成裁决\n"
+            "- 你需要第二个同类型但不同风格的 analyst 来验证一个高风险假设\n\n"
             "决策类型:\n"
             '- `long`:看涨,建议买入\n'
             '- `short`:看跌,建议卖出或做空\n'
             '- `hold`:中性,维持当前持仓\n\n'
             "输出要求:\n"
+            "- 触发扩编条件时,必须先使用动态团队工具创建分析师,并在继续决策前吸收其分析输入\n"
+            "- 不允许口头声称“需要更多分析”但不实际调用创建工具\n"
             "- 使用 `make_decision` 工具记录每个股票的最终决策\n"
             "- 记录完成后给出投资逻辑总结\n"
             "- 最终总结必须使用简体中文\n"
@@ -327,6 +336,10 @@ class RunWorkspaceManager:
             "- 在决定数量时考虑可用现金,不要超出现金允许范围\n"
             "- 考虑做空头寸的保证金要求\n"
             "- 仓位规模相对于组合总资产保持保守\n"
+            "- 当任务涉及当前团队未覆盖的领域(如期权、宏观、行业专项、事件驱动、加密资产等)时,应优先创建或克隆对应分析师,而不是勉强用现有团队输出低质量结论\n"
+            "- 当分析师之间长期存在高冲突且缺乏裁决信息时,应考虑增加一个补充视角的分析师\n"
+            "- 如果你已经识别出覆盖缺口,却没有调用动态团队工具补齐团队,就不应直接输出高置信度交易决策\n"
+            "- 对新创建分析师的输出必须纳入本轮决策依据,不能创建后忽略\n"
             "- 始终为决策提供清晰理由\n"
             "- 不要输出英文投资报告或英文结论\n"
         )
@@ -7,7 +7,7 @@ Provides REST API endpoints for tool guard operations.
 from __future__ import annotations

 from typing import Any, Dict, List, Optional
-from datetime import UTC, datetime
+from datetime import datetime, timezone

 from fastapi import APIRouter, HTTPException
 from pydantic import BaseModel, Field
@@ -146,7 +146,7 @@ async def check_tool_call(

     if request.tool_name in SAFE_TOOLS:
         record.status = ApprovalStatus.APPROVED
-        record.resolved_at = datetime.now(UTC)
+        record.resolved_at = datetime.now(timezone.utc)
         record.resolved_by = "system"
         STORE.set_status(
             record.approval_id,
@@ -7,6 +7,7 @@ import asyncio
 import json
 import logging
 import os
+import re
 import shutil
 import subprocess
 import sys
@@ -154,6 +155,7 @@ class RunContextResponse(BaseModel):

 class RuntimeAgentState(BaseModel):
     agent_id: str
+    display_name: Optional[str] = None
     status: str
     last_session: Optional[str] = None
     last_updated: str
@@ -300,6 +302,70 @@ def _load_run_server_state(run_dir: Path) -> Dict[str, Any]:
     return {}


+def _resolve_runtime_agent_display_name(run_id: str, agent_id: str) -> Optional[str]:
+    """Best-effort display name for one runtime agent.
+
+    Priority:
+    1. PROFILE.md line like `角色定位:中文名`
+    2. PROFILE.md YAML frontmatter field `name`
+    """
+    asset_dir = PROJECT_ROOT / "runs" / run_id / "agents" / agent_id
+    profile_path = asset_dir / "PROFILE.md"
+    if not profile_path.exists():
+        return None
+
+    try:
+        raw = profile_path.read_text(encoding="utf-8").strip()
+    except Exception:
+        return None
+
+    if not raw:
+        return None
+
+    frontmatter_name: Optional[str] = None
+    if raw.startswith("---"):
+        parts = raw.split("---", 2)
+        if len(parts) >= 3:
+            try:
+                import yaml
+                parsed = yaml.safe_load(parts[1].strip()) or {}
+                if isinstance(parsed, dict):
+                    value = parsed.get("name")
+                    if isinstance(value, str) and value.strip():
+                        frontmatter_name = value.strip()
+            except Exception:
+                pass
+            raw = parts[2].strip()
+
+    for line in raw.splitlines():
+        normalized = line.strip()
+        if normalized.startswith("角色定位:"):
+            value = normalized.split(":", 1)[1].strip()
+            if value:
+                return value
+        if normalized.lower().startswith("role:"):
+            value = normalized.split(":", 1)[1].strip()
+            if value:
+                return value
+
+    return frontmatter_name
+
+
+def _enrich_runtime_agents(run_id: Optional[str], agents: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    if not run_id:
+        return agents
+
+    enriched: List[Dict[str, Any]] = []
+    for item in agents:
+        payload = dict(item)
+        display_name = payload.get("display_name")
+        agent_id = str(payload.get("agent_id") or "").strip()
+        if agent_id and (not isinstance(display_name, str) or not display_name.strip()):
+            payload["display_name"] = _resolve_runtime_agent_display_name(run_id, agent_id)
+        enriched.append(payload)
+    return enriched
+
+
 def _extract_history_metrics(run_dir: Path) -> tuple[int, Optional[float]]:
     """Prefer runtime state files over dashboard exports for history summaries."""
     server_state = _load_run_server_state(run_dir)
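For context on `_resolve_runtime_agent_display_name` above: it follows the common Markdown frontmatter convention, a YAML block fenced by `---` at the top of the file. A standalone sketch of that parsing pattern, assuming a PROFILE.md of this shape:

```python
import yaml  # PyYAML

sample = """---
name: 宏观分析师
---
角色定位:宏观分析师
"""

parts = sample.strip().split("---", 2)
if len(parts) >= 3:
    meta = yaml.safe_load(parts[1].strip()) or {}  # frontmatter fields
    body = parts[2].strip()                        # Markdown body
    print(meta.get("name"))       # frontmatter `name`
    print(body.splitlines()[0])   # the `角色定位:...` line, which wins in the function above
```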
@@ -566,10 +632,11 @@ def _validate_gateway_config(bootstrap: Dict[str, Any]) -> List[str]:
     # Check LLM configuration
     model_name = os.getenv("MODEL_NAME")
     openai_key = os.getenv("OPENAI_API_KEY")
+    dashscope_key = os.getenv("DASHSCOPE_API_KEY")
     if not model_name:
         errors.append("MODEL_NAME environment variable is not set")
-    if not openai_key:
-        errors.append("OPENAI_API_KEY environment variable is not set")
+    if not openai_key and not dashscope_key:
+        errors.append("Either OPENAI_API_KEY or DASHSCOPE_API_KEY environment variable must be set")

     # Validate tickers
     tickers = bootstrap.get("tickers", [])
@@ -722,7 +789,8 @@ async def get_run_context() -> RunContextResponse:
 async def get_runtime_agents() -> RuntimeAgentsResponse:
     """Return agent states from the active runtime, or latest persisted run."""
     snapshot = _get_active_runtime_snapshot() if _is_gateway_running() else _load_latest_runtime_snapshot()
-    agents = snapshot.get("agents", [])
+    run_id = snapshot.get("context", {}).get("config_name")
+    agents = _enrich_runtime_agents(run_id, snapshot.get("agents", []))

     return RuntimeAgentsResponse(
         agents=[RuntimeAgentState(**a) for a in agents]
@@ -869,11 +937,24 @@ def _load_latest_runtime_snapshot() -> Dict[str, Any]:


 def _get_active_runtime_snapshot() -> Dict[str, Any]:
-    """Return the active runtime snapshot, preferring in-memory manager state."""
+    """Return the active runtime snapshot.
+
+    For a running Gateway, the canonical runtime source of truth is the
+    run-scoped snapshot file under `runs/<run_id>/state/runtime_state.json`,
+    because the Gateway subprocess mutates it directly while the parent
+    runtime_service process may still hold a stale in-memory manager snapshot.
+    """
     if not _is_gateway_running():
         raise HTTPException(status_code=404, detail="No runtime is currently running")

     manager = _runtime_state.runtime_manager
+    if manager is not None:
+        run_id = str(getattr(manager, "config_name", "") or "").strip()
+        if run_id:
+            snapshot_path = _get_run_dir(run_id) / "state" / "runtime_state.json"
+            if snapshot_path.exists():
+                return json.loads(snapshot_path.read_text(encoding="utf-8"))
+
     if manager is not None and hasattr(manager, "build_snapshot"):
         snapshot = manager.build_snapshot()
         context = snapshot.get("context") or {}
@@ -900,11 +981,32 @@ def _read_log_tail(path: Path, max_chars: int = 120_000) -> str:
     if not path.exists() or not path.is_file():
         return ""
     text = path.read_text(encoding="utf-8", errors="replace")
+    text = _sanitize_runtime_log_text(text)
     if len(text) <= max_chars:
         return text
     return text[-max_chars:]


+def _sanitize_runtime_log_text(text: str) -> str:
+    if not text:
+        return ""
+
+    # Drop repetitive development-only warnings for unsandboxed skill execution.
+    text = re.sub(
+        r"(?:^|\n)=+\n"
+        r"⚠️\s+\[安全警告\]\s+技能在无沙盒模式下运行\s+\(SKILL_SANDBOX_MODE=none\)\n"
+        r"\s+技能脚本将直接在当前进程中执行,无隔离保护。\n"
+        r"\s+建议:生产环境请设置\s+SKILL_SANDBOX_MODE=docker\n"
+        r"=+\n?",
+        "\n",
+        text,
+        flags=re.MULTILINE,
+    )
+
+    text = re.sub(r"\n{3,}", "\n\n", text)
+    return text.strip()
+
+
 def _get_current_runtime_context() -> Dict[str, Any]:
     """Return the active runtime context from the latest snapshot."""
     if not _is_gateway_running():
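`_sanitize_runtime_log_text` above removes a repeated banner with a single multi-line substitution followed by newline squeezing. A reduced sketch of the same two-step pattern (the banner text here is a stand-in, not the real warning):

```python
import re

log = "start\n====\nWARNING: sandbox disabled\n====\nend\n"

# Step 1: collapse the fenced banner into a single newline.
log = re.sub(r"(?:^|\n)=+\nWARNING: sandbox disabled\n=+\n?", "\n", log)
# Step 2: squeeze any runs of 3+ newlines left behind down to 2.
log = re.sub(r"\n{3,}", "\n\n", log)
print(log.strip())  # start / end, banner removed
```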
@@ -81,7 +81,12 @@ async def proxy_ws(ws: WebSocket):
     await ws.accept()
     upstream = None
     try:
-        upstream = await websockets.asyncio.client.connect(gateway_url)
+        upstream = await websockets.asyncio.client.connect(
+            gateway_url,
+            ping_interval=20,
+            ping_timeout=120,
+            max_size=10 * 1024 * 1024,  # 10MB
+        )

         async def client_to_upstream():
             try:
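In the `websockets` client API, `ping_interval`/`ping_timeout` tune the keepalive ping/pong cycle and `max_size` caps inbound message size. A minimal client sketch with the same tuning (the URL is a placeholder):

```python
import asyncio
import websockets

async def main() -> None:
    # Ping every 20s, allow 120s for the pong, and accept frames up to
    # 10MB, matching the proxy settings above.
    async with websockets.connect(
        "ws://localhost:8765",  # placeholder URL
        ping_interval=20,
        ping_timeout=120,
        max_size=10 * 1024 * 1024,
    ) as ws:
        await ws.send("ping")
        print(await ws.recv())

asyncio.run(main())
```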
@@ -28,11 +28,11 @@ def create_app() -> FastAPI:
     add_cors_middleware(app)

     @app.get("/health")
-    async def health_check() -> dict[str, str]:
+    def health_check() -> dict[str, str]:
         return {"status": "healthy", "service": "news-service"}

     @app.get("/api/enriched-news")
-    async def api_get_enriched_news(
+    def api_get_enriched_news(
         ticker: str = Query(..., min_length=1),
         start_date: str | None = Query(None),
         end_date: str | None = Query(None),
@@ -49,7 +49,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/news-for-date")
-    async def api_get_news_for_date(
+    def api_get_news_for_date(
         ticker: str = Query(..., min_length=1),
         date: str = Query(...),
         limit: int = Query(20, ge=1, le=100),
@@ -64,7 +64,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/news-timeline")
-    async def api_get_news_timeline(
+    def api_get_news_timeline(
         ticker: str = Query(..., min_length=1),
         start_date: str = Query(...),
         end_date: str = Query(...),
@@ -79,7 +79,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/categories")
-    async def api_get_categories(
+    def api_get_categories(
         ticker: str = Query(..., min_length=1),
         start_date: str | None = Query(None),
         end_date: str | None = Query(None),
@@ -96,7 +96,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/similar-days")
-    async def api_get_similar_days(
+    def api_get_similar_days(
         ticker: str = Query(..., min_length=1),
         date: str = Query(...),
         n_similar: int = Query(5, ge=1, le=20),
@@ -111,7 +111,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/stories/{ticker}")
-    async def api_get_story(
+    def api_get_story(
         ticker: str,
         as_of_date: str = Query(...),
         store: MarketStore = Depends(get_market_store),
@@ -124,7 +124,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/range-explain")
-    async def api_get_range_explain(
+    def api_get_range_explain(
         ticker: str = Query(..., min_length=1),
         start_date: str = Query(...),
         end_date: str = Query(...),
@@ -29,12 +29,12 @@ def create_app() -> FastAPI:
     add_cors_middleware(app)

     @app.get("/health")
-    async def health_check() -> dict[str, str]:
+    def health_check() -> dict[str, str]:
         """Health check endpoint."""
         return {"status": "healthy", "service": "trading-service"}

     @app.get("/api/prices", response_model=PriceResponse)
-    async def api_get_prices(
+    def api_get_prices(
         ticker: str = Query(..., min_length=1),
         start_date: str = Query(...),
         end_date: str = Query(...),
@@ -47,7 +47,7 @@ def create_app() -> FastAPI:
         return PriceResponse(ticker=payload["ticker"], prices=payload["prices"])

     @app.get("/api/financials", response_model=FinancialMetricsResponse)
-    async def api_get_financials(
+    def api_get_financials(
         ticker: str = Query(..., min_length=1),
         end_date: str = Query(...),
         period: str = Query("ttm"),
@@ -62,7 +62,7 @@ def create_app() -> FastAPI:
         return FinancialMetricsResponse(financial_metrics=payload["financial_metrics"])

     @app.get("/api/news", response_model=CompanyNewsResponse)
-    async def api_get_news(
+    def api_get_news(
         ticker: str = Query(..., min_length=1),
         end_date: str = Query(...),
         start_date: str | None = Query(None),
@@ -77,7 +77,7 @@ def create_app() -> FastAPI:
         return CompanyNewsResponse(news=payload["news"])

     @app.get("/api/insider-trades", response_model=InsiderTradeResponse)
-    async def api_get_insider_trades(
+    def api_get_insider_trades(
         ticker: str = Query(..., min_length=1),
         end_date: str = Query(...),
         start_date: str | None = Query(None),
@@ -92,12 +92,12 @@ def create_app() -> FastAPI:
         return InsiderTradeResponse(insider_trades=payload["insider_trades"])

     @app.get("/api/market/status")
-    async def api_get_market_status() -> dict[str, Any]:
+    def api_get_market_status() -> dict[str, Any]:
         """Return current market status using the existing market service logic."""
         return trading_domain.get_market_status_payload()

     @app.get("/api/market-cap")
-    async def api_get_market_cap(
+    def api_get_market_cap(
         ticker: str = Query(..., min_length=1),
         end_date: str = Query(...),
     ) -> dict[str, Any]:
@@ -108,7 +108,7 @@ def create_app() -> FastAPI:
         )

     @app.get("/api/line-items", response_model=LineItemResponse)
-    async def api_get_line_items(
+    def api_get_line_items(
         ticker: str = Query(..., min_length=1),
         line_items: list[str] = Query(...),
         end_date: str = Query(...),
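A note on the `async def` → `def` change applied across both service apps above: FastAPI runs plain `def` path operations in a threadpool, so the blocking store calls inside them no longer stall the event loop. A minimal sketch of the two styles, with a stand-in blocking helper:

```python
import time
from fastapi import FastAPI

app = FastAPI()

def fetch_prices(ticker: str) -> dict:
    time.sleep(1)  # stand-in for a blocking DB/HTTP call
    return {"ticker": ticker}

@app.get("/blocking")
async def blocking(ticker: str) -> dict:
    # Runs on the event loop itself: the sleep stalls every other request.
    return fetch_prices(ticker)

@app.get("/threaded")
def threaded(ticker: str) -> dict:
    # Plain `def` handlers are dispatched to a worker thread by FastAPI,
    # so the event loop keeps serving concurrent requests.
    return fetch_prices(ticker)
```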
@@ -27,8 +27,10 @@ valuation_analyst:
 portfolio_manager:
   skills:
     - portfolio_decisioning
+    - dynamic_team_management
   active_tool_groups:
     - portfolio_ops
+    - dynamic_team

 risk_manager:
   skills:
@@ -6,11 +6,13 @@ Core Pipeline - Orchestrates multi-agent analysis and decision-making
 # flake8: noqa: E501
 # pylint: disable=W0613,C0301

+import asyncio
 import json
 import logging
 import os
 import re
 from contextlib import nullcontext
+from datetime import datetime
 from pathlib import Path
 from typing import Any, Awaitable, Callable, Dict, List, Optional

@@ -32,7 +34,7 @@ from backend.agents.toolkit_factory import create_agent_toolkit
 from backend.agents.workspace_manager import WorkspaceManager
 from backend.agents.prompt_loader import get_prompt_loader
 from backend.llm.models import get_agent_formatter, get_agent_model
-from backend.config.constants import ANALYST_TYPES
+from backend.config.constants import ANALYST_TYPES, AGENT_CONFIG
 from backend.agents.dynamic_team_types import AnalystConfig
 from backend.tools.dynamic_team_tools import DynamicTeamController, set_controller

@@ -70,12 +72,10 @@ def _resolve_evo_agent_ids() -> set[str]:
 # Team infrastructure imports (graceful import - may not exist yet)
 try:
     from backend.agents.team.team_coordinator import TeamCoordinator
-    from backend.agents.team.msg_hub import MsgHub as TeamMsgHub
     TEAM_COORD_AVAILABLE = True
 except ImportError:
     TEAM_COORD_AVAILABLE = False
     TeamCoordinator = None
-    TeamMsgHub = None


 logger = logging.getLogger(__name__)
@@ -103,7 +103,7 @@ class TradingPipeline:

     Real-time updates via StateSync after each agent completes.

-    Supports both legacy agent lists and run-scoped agent loading.
+    Supports run-scoped EvoAgent loading with workspace-driven configuration.
     """

     def __init__(
@@ -142,7 +142,7 @@ class TradingPipeline:
         self._team_controller = DynamicTeamController(
             create_callback=self._create_runtime_analyst,
             remove_callback=self._remove_runtime_analyst,
-            get_analysts_callback=self._all_analysts,
+            get_analysts_callback=lambda: self._all_analysts() + [self.risk_manager, self.pm],
         )
         set_controller(self._team_controller)

@@ -230,8 +230,25 @@ class TradingPipeline:
                 "system",
             ),
         ):
-            # Phase 1.1: Analysts
+            # Phase 1.0: PM assesses team coverage and expands if needed
             if not last_phase or last_phase == "cleared":
+                _log("Phase 1.0: Team gap assessment")
+                await self._run_team_gap_assessment(
+                    tickers=tickers,
+                    date=date,
+                    prices=prices,
+                )
+                active_analysts = self._get_active_analysts()
+                if self.runtime_manager:
+                    self._runtime_batch_status(active_analysts, "analysis_in_progress")
+                self._save_checkpoint(session_key, "team_assessment", {
+                    "prices": prices,
+                    "close_prices": close_prices,
+                })
+                last_phase = "team_assessment"
+
+            # Phase 1.1: Analysts
+            if last_phase == "team_assessment":
                 _log("Phase 1.1: Analyst analysis (parallel)")
                 analyst_results = await self._run_analysts_parallel(
                     tickers,
@@ -754,6 +771,7 @@ class TradingPipeline:
             await self.state_sync.on_agent_complete(
                 agent_id="Daily Log",
                 content=reflection_content,
+                agent_name="每日记录",
             )

         # Phase 6: APO (Autonomous Policy Optimization)
@@ -1020,12 +1038,13 @@ class TradingPipeline:

         pm_msg = Msg(name="system", content=pm_prompt, role="user")
         pm_response = await self.pm.reply(pm_msg)
+        pm_content = self._extract_text_content(pm_response.content)

         if self.state_sync:
-            pm_content = self._extract_text_content(pm_response.content)
             await self.state_sync.on_conference_message(
                 agent_id="portfolio_manager",
                 content=pm_content,
+                agent_name=self._resolve_agent_display_name("portfolio_manager"),
             )

         # Analysts share perspectives (supports per-round active team updates)
@@ -1050,6 +1069,7 @@ class TradingPipeline:
                 await self.state_sync.on_conference_message(
                     agent_id=analyst.name,
                     content=analyst_content,
+                    agent_name=self._resolve_agent_display_name(analyst.name),
                 )

         if self.state_sync:
@@ -1082,6 +1102,7 @@ class TradingPipeline:
             await self.state_sync.on_conference_message(
                 agent_id="conference summary",
                 content=conference_summary,
+                agent_name="会议总结",
             )
             await self.state_sync.on_conference_end()

@@ -1139,6 +1160,116 @@ class TradingPipeline:
             f"and any remaining concerns about {', '.join(tickers)}."
         )

+    async def _run_team_gap_assessment(
+        self,
+        *,
+        tickers: List[str],
+        date: str,
+        prices: Optional[Dict[str, float]],
+    ) -> str:
+        active_analysts = self._get_active_analysts()
+        team_summary = [
+            {
+                "agent_id": analyst.name,
+                "display_name": self._resolve_agent_display_name(analyst.name),
+            }
+            for analyst in active_analysts
+        ]
+        prompt = (
+            f"As Portfolio Manager, perform a team coverage assessment before analysis for {date}.\n\n"
+            f"Tickers: {', '.join(tickers)}\n"
+            f"Current team: {json.dumps(team_summary, ensure_ascii=False, indent=2)}\n"
+            f"Current prices snapshot: {json.dumps(prices, ensure_ascii=False, indent=2) if prices else 'N/A'}\n\n"
+            "Your job in this phase is not to make investment decisions. "
+            "First decide whether the current team has enough domain coverage. "
+            "If the team is insufficient, immediately call dynamic team tools to create or clone the needed analysts now. "
+            "Before creating any analyst, explicitly check whether an existing analyst already covers that role. "
+            "Do not create duplicate roles with different IDs but the same responsibilities. "
+            "If the current team is sufficient, explicitly say the current team is sufficient and explain why."
+        )
+        msg = Msg(name="system", content=prompt, role="user")
+        response = await self.pm.reply(msg)
+        pm_content = self._extract_text_content(response.content)
+        enforced_pm_content = await self._enforce_pm_team_expansion_if_needed(
+            tickers=tickers,
+            date=date,
+            pm_content=pm_content,
+        )
+        if enforced_pm_content:
+            pm_content = enforced_pm_content
+
+        if self.state_sync:
+            await self.state_sync.on_agent_complete(
+                agent_id="portfolio_manager",
+                agent_name=self._resolve_agent_display_name("portfolio_manager"),
+                content=pm_content,
+            )
+
+        return pm_content
+
+    def _pm_requests_team_expansion(self, text: str) -> bool:
+        normalized = (text or "").strip().lower()
+        if not normalized:
+            return False
+
+        phrases = [
+            "创建",
+            "新增分析师",
+            "补充分析师",
+            "扩编团队",
+            "需要行业分析师",
+            "需要量化分析师",
+            "需要宏观分析师",
+            "需要补充",
+            "先扩编",
+            "create analyst",
+            "create a new analyst",
+            "add analyst",
+            "expand the team",
+            "need a specialist",
+            "need another analyst",
+        ]
+        return any(phrase in normalized for phrase in phrases)
+
+    async def _enforce_pm_team_expansion_if_needed(
+        self,
+        *,
+        tickers: List[str],
+        date: str,
+        pm_content: str,
+    ) -> Optional[str]:
+        if not self._pm_requests_team_expansion(pm_content):
+            return None
+
+        before_ids = {agent.name for agent in self._get_active_analysts()}
+
+        followup_prompt = (
+            f"You identified a team coverage gap for {date} across {', '.join(tickers)}. "
+            "This is still the pre-analysis team assessment phase. "
+            "Do not merely recommend adding analysts. If additional analysts are needed, "
+            "you must now call the dynamic team tools (`create_analyst` or `clone_analyst`) "
+            "to add the required specialists before analyst analysis begins. "
+            "Only after the tool call succeeds may you explain why the new analysts were added. "
+            "If you truly believe the current team is sufficient, explicitly say the current team is sufficient."
+        )
+        followup_msg = Msg(name="system", content=followup_prompt, role="user")
+        followup_response = await self.pm.reply(followup_msg)
+        followup_content = self._extract_text_content(followup_response.content)
+        after_ids = {agent.name for agent in self._get_active_analysts()}
+
+        if after_ids != before_ids:
+            created = sorted(after_ids - before_ids)
+            logger.info(
+                "PM team expansion enforced successfully before analysis; added analysts=%s",
+                created,
+            )
+        else:
+            logger.info(
+                "PM mentioned expansion in team assessment but did not add analysts after enforcement prompt",
+            )
+
+        return followup_content
+
     def _build_analyst_discussion_prompt(
         self,
         cycle: int,
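The enforcement step above hinges on a substring heuristic over the PM's reply. A quick standalone sketch of that matching (abbreviated phrase list for illustration):

```python
PHRASES = ["创建", "扩编团队", "expand the team", "need a specialist"]

def requests_expansion(text: str) -> bool:
    # Lowercase once, then look for any trigger phrase as a substring.
    normalized = (text or "").strip().lower()
    return any(phrase in normalized for phrase in PHRASES)

assert requests_expansion("We should EXPAND THE TEAM with a macro analyst")
assert requests_expansion("建议创建一名期权分析师")
assert not requests_expansion("当前团队足以覆盖任务")
```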
@@ -1152,6 +1283,88 @@ class TradingPipeline:
             f"Do not use tools - focus on sharing your professional opinion."
         )

+    def _resolve_agent_display_name(self, agent_id: str) -> str:
+        runtime_name = None
+        if self.runtime_manager:
+            state = self.runtime_manager.get_agent_state(agent_id)
+            runtime_name = getattr(state, "display_name", None) if state else None
+        if isinstance(runtime_name, str) and runtime_name.strip():
+            return runtime_name.strip()
+
+        static_name = AGENT_CONFIG.get(agent_id, {}).get("name")
+        if isinstance(static_name, str) and static_name.strip():
+            return static_name.strip()
+
+        profile_path = Path(__file__).resolve().parents[2] / "runs" / self.runtime_manager.config_name / "agents" / agent_id / "PROFILE.md" if self.runtime_manager else None
+        if profile_path and profile_path.exists():
+            try:
+                raw = profile_path.read_text(encoding="utf-8")
+                for line in raw.splitlines():
+                    text = line.strip()
+                    if text.startswith("角色定位:"):
+                        value = text.split(":", 1)[1].strip()
+                        if value:
+                            return value
+            except Exception:
+                pass
+
+        return agent_id
+
+    @staticmethod
+    def _normalize_role_key(value: str) -> str:
+        normalized = (value or "").strip().lower()
+        normalized = normalized.replace("_", "")
+        normalized = normalized.replace(" ", "")
+        replacements = {
+            "analyst": "分析师",
+            "macro": "宏观",
+            "technical": "技术",
+            "tech": "技术",
+            "sentiment": "情绪",
+            "fundamentals": "基本面",
+            "fundamental": "基本面",
+            "valuation": "估值",
+            "crypto": "加密",
+            "cryptocurrency": "加密",
+            "semiconductor": "半导体",
+            "industry": "行业",
+            "sector": "行业",
+            "risk": "风险",
+        }
+        for src, target in replacements.items():
+            normalized = normalized.replace(src, target)
+        return normalized
+
+    @staticmethod
+    def _contains_cjk(value: str) -> bool:
+        text = (value or "").strip()
+        return any("\u4e00" <= ch <= "\u9fff" for ch in text)
+
+    def _find_similar_existing_analyst(
+        self,
+        *,
+        agent_id: str,
+        analyst_type: str,
+        custom_config: Optional[AnalystConfig],
+    ) -> Optional[str]:
+        requested_names = {self._normalize_role_key(agent_id)}
+        if custom_config and custom_config.persona and custom_config.persona.name:
+            requested_names.add(self._normalize_role_key(custom_config.persona.name))
+
+        for agent in self._all_analysts():
+            existing_id = getattr(agent, "name", None) or getattr(agent, "agent_id", None)
+            if not existing_id or existing_id == agent_id:
+                continue
+
+            existing_names = {
+                self._normalize_role_key(existing_id),
+                self._normalize_role_key(self._resolve_agent_display_name(existing_id)),
+            }
+            if requested_names & existing_names:
+                return existing_id
+
+        return None
+
     async def _collect_final_predictions(
         self,
         tickers: List[str],
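`_contains_cjk` above checks the CJK Unified Ideographs block (U+4E00 to U+9FFF), which is what the Chinese-display-name guard later in this diff relies on. The same check in isolation:

```python
def contains_cjk(value: str) -> bool:
    # True when any character falls in the CJK Unified Ideographs block.
    return any("\u4e00" <= ch <= "\u9fff" for ch in (value or "").strip())

print(contains_cjk("宏观分析师"))     # True
print(contains_cjk("macro_analyst"))  # False
```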
@@ -1300,6 +1513,7 @@ class TradingPipeline:
                 await self.state_sync.on_agent_complete(
                     agent_id=analyst.name,
                     content=text_content,
+                    agent_name=self._resolve_agent_display_name(analyst.name),
                 )

         return results
@@ -1375,6 +1589,7 @@ class TradingPipeline:
                 await self.state_sync.on_agent_complete(
                     agent_id=analyst.name,
                     content=text_content,
+                    agent_name=self._resolve_agent_display_name(analyst.name),
                 )
             else:
                 logger.warning(
@@ -1456,6 +1671,7 @@ class TradingPipeline:
             await self.state_sync.on_agent_complete(
                 agent_id="risk_manager",
                 content=text_content,
+                agent_name=self._resolve_agent_display_name("risk_manager"),
             )

         return extracted
@@ -1542,6 +1758,7 @@ class TradingPipeline:
             await self.state_sync.on_agent_complete(
                 agent_id="portfolio_manager",
                 content=text_content,
+                agent_name=self._resolve_agent_display_name("portfolio_manager"),
             )

         return extracted
@@ -1776,8 +1993,29 @@ class TradingPipeline:
                 f"Available: {', '.join(ANALYST_TYPES.keys())}. "
                 f"Or provide custom_config to create a custom analyst."
             )
+        display_name = (
+            custom_config.persona.name
+            if custom_config and custom_config.persona and custom_config.persona.name
+            else ""
+        )
+        if not self._contains_cjk(display_name):
+            return (
+                f"Analyst '{agent_id}' requires a Chinese display name. "
+                "Please provide `name` in Chinese characters when creating dynamic analysts."
+            )
         if agent_id in {agent.name for agent in self._all_analysts()}:
             return f"Analyst '{agent_id}' already exists."
+        similar_existing = self._find_similar_existing_analyst(
+            agent_id=agent_id,
+            analyst_type=analyst_type,
+            custom_config=custom_config,
+        )
+        if similar_existing:
+            return (
+                f"Analyst '{agent_id}' is too similar to existing analyst "
+                f"'{similar_existing}'. Reuse or clone the existing analyst instead of "
+                f"creating a duplicate role."
+            )

         config_name = getattr(self.pm, "config", {}).get("config_name", "default")
         project_root = Path(__file__).resolve().parents[2]
@@ -1860,6 +2098,48 @@ class TradingPipeline:
         setattr(agent, "workspace_id", config_name)
         self._dynamic_analysts[agent_id] = agent

+        if self.runtime_manager:
+            display_name = None
+            if custom_config and custom_config.persona and custom_config.persona.name:
+                display_name = custom_config.persona.name
+            self.runtime_manager.register_agent(
+                agent_id,
+                display_name=display_name,
+            )
+            self.runtime_manager.log_event(
+                "agent:created",
+                {
+                    "agent_id": agent_id,
+                    "analyst_type": analyst_type,
+                    "display_name": display_name,
+                },
+            )
+        logger.info(
+            "Dynamic analyst created: agent_id=%s analyst_type=%s custom=%s",
+            agent_id,
+            analyst_type,
+            bool(custom_config),
+        )
+        if self.state_sync:
+            try:
+                asyncio.create_task(
+                    self.state_sync.emit(
+                        {
+                            "type": "runtime_agents_updated",
+                            "action": "created",
+                            "agentId": agent_id,
+                            "agentName": display_name or self._resolve_agent_display_name(agent_id),
+                        },
+                        persist=False,
+                    )
+                )
+            except Exception as exc:
+                logger.warning(
+                    "Failed to broadcast runtime_agents_updated(create) for %s: %s",
+                    agent_id,
+                    exc,
+                )
+
         # Store custom config for future reference (e.g., cloning)
         if custom_config:
             self._dynamic_analyst_configs[agent_id] = custom_config
@@ -1879,6 +2159,31 @@ class TradingPipeline:
         self._dynamic_analysts.pop(agent_id, None)
         # Also remove stored config if exists
         self._dynamic_analyst_configs.pop(agent_id, None)
+        if self.runtime_manager:
+            self.runtime_manager.unregister_agent(agent_id)
+            self.runtime_manager.log_event(
+                "agent:removed",
+                {"agent_id": agent_id},
+            )
+        logger.info("Dynamic analyst removed: agent_id=%s", agent_id)
+        if self.state_sync:
+            try:
+                asyncio.create_task(
+                    self.state_sync.emit(
+                        {
+                            "type": "runtime_agents_updated",
+                            "action": "removed",
+                            "agentId": agent_id,
+                        },
+                        persist=False,
+                    )
+                )
+            except Exception as exc:
+                logger.warning(
+                    "Failed to broadcast runtime_agents_updated(remove) for %s: %s",
+                    agent_id,
+                    exc,
+                )
         config_name = getattr(self.pm, "config", {}).get("config_name", "default")
         project_root = Path(__file__).resolve().parents[2]
         update_active_analysts(
@@ -123,7 +123,11 @@ class StateSync:
         # Persist to feed_history
         if persist:
             self.storage.add_feed_message(self._state, event)
-            self.save_state()
+            # Make persistence non-blocking to keep event loop snappy
+            if asyncio.get_event_loop().is_running():
+                asyncio.create_task(asyncio.to_thread(self.save_state))
+            else:
+                self.save_state()

         # Broadcast to frontend
         if self._broadcast_fn:
@@ -135,6 +139,7 @@ class StateSync:
         self,
         agent_id: str,
         content: str,
+        agent_name: Optional[str] = None,
         **extra,
     ):
         """
@@ -151,6 +156,7 @@ class StateSync:
             {
                 "type": "agent_message",
                 "agentId": agent_id,
+                "agentName": agent_name,
                 "content": content,
                 "ts": ts_ms,
                 **extra,
@@ -212,7 +218,12 @@ class StateSync:
             persist=False,
         )

-    async def on_conference_message(self, agent_id: str, content: str):
+    async def on_conference_message(
+        self,
+        agent_id: str,
+        content: str,
+        agent_name: Optional[str] = None,
+    ):
         """Called when an agent speaks during conference"""
         ts_ms = self._get_timestamp_ms()

@@ -220,6 +231,7 @@ class StateSync:
             {
                 "type": "conference_message",
                 "agentId": agent_id,
+                "agentName": agent_name,
                 "content": content,
                 "ts": ts_ms,
             },
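The persistence change in the first StateSync hunk offloads a synchronous `save_state` to a worker thread whenever an event loop is running. A minimal sketch of that `asyncio.to_thread` pattern, with a stand-in `save_state`:

```python
import asyncio
import time

def save_state() -> None:
    time.sleep(0.5)  # stand-in for blocking file I/O

async def emit_event() -> None:
    # Schedule the blocking write on a worker thread; the loop stays free
    # to keep broadcasting to clients in the meantime.
    task = asyncio.create_task(asyncio.to_thread(save_state))
    print("event broadcast without waiting for disk")
    await task  # the real code is fire-and-forget

asyncio.run(emit_event())
```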
@@ -190,8 +190,9 @@ class MarketStore:
         name: str | None = None,
         sector: str | None = None,
         is_active: bool = True,
-    ) -> None:
+    ) -> int:
         timestamp = _utc_timestamp()
+        count = 0
         with self._connect() as conn:
             conn.execute(
                 """
@@ -206,6 +207,8 @@ class MarketStore:
                 """,
                 (symbol, name, sector, 1 if is_active else 0, timestamp, timestamp),
             )
+            count += 1
+        return count

     def update_fetch_watermark(
         self,
@@ -213,8 +216,9 @@ class MarketStore:
         symbol: str,
         price_date: str | None = None,
         news_date: str | None = None,
-    ) -> None:
+    ) -> int:
         timestamp = _utc_timestamp()
+        count = 0
         with self._connect() as conn:
             conn.execute(
                 """
@@ -227,6 +231,8 @@ class MarketStore:
                 """,
                 (symbol, timestamp, timestamp, price_date, news_date),
             )
+            count += 1
+        return count

     def get_ticker_watermarks(self, symbol: str) -> dict[str, Any]:
         with self._connect() as conn:
@@ -263,6 +269,8 @@ class MarketStore:
         count = 0
         with self._connect() as conn:
             for row in rows:
+                if not row.get("date"):
+                    continue
                 conn.execute(
                     """
                     INSERT INTO ohlc
@@ -341,6 +349,7 @@ class MarketStore:
                         timestamp,
                     ),
                 )
+                count += 1
                 for ticker in tickers:
                     conn.execute(
                         """
@@ -349,7 +358,6 @@ class MarketStore:
                         """,
                         (news_id, str(ticker).strip().upper()),
                     )
-                count += 1
         return count

     def get_news_without_trade_date(self, symbol: str | None = None, *, limit: int = 5000) -> list[dict[str, Any]]:
@@ -928,8 +936,9 @@ class MarketStore:
         as_of_date: str,
         content: str,
         source: str = "local",
-    ) -> None:
+    ) -> int:
         timestamp = _utc_timestamp()
+        count = 0
         with self._connect() as conn:
             conn.execute(
                 """
@@ -943,6 +952,8 @@ class MarketStore:
                 """,
                 (symbol, as_of_date, content, source, timestamp, timestamp),
             )
+            count += 1
+        return count

     def delete_story_cache(
         self,
@@ -1002,8 +1013,9 @@ class MarketStore:
         target_date: str,
         payload: dict[str, Any],
         source: str = "local",
-    ) -> None:
+    ) -> int:
         timestamp = _utc_timestamp()
+        count = 0
         with self._connect() as conn:
             conn.execute(
                 """
@@ -1017,6 +1029,8 @@ class MarketStore:
                 """,
                 (symbol, target_date, _json_dumps(payload), source, timestamp, timestamp),
             )
+            count += 1
+        return count

     def delete_similar_day_cache(
         self,
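The `-> None` to `-> int` changes above make each MarketStore writer report how many statements it executed, so ingestion jobs can log or assert write volume. A self-contained sketch of the count-and-return pattern on a throwaway SQLite table:

```python
import sqlite3

def upsert_ticker(conn: sqlite3.Connection, symbol: str) -> int:
    # Same pattern as the diff: count each executed statement and return
    # the total to the caller.
    count = 0
    conn.execute(
        "INSERT INTO tickers(symbol) VALUES (?) "
        "ON CONFLICT(symbol) DO UPDATE SET symbol = excluded.symbol",
        (symbol,),
    )
    count += 1
    return count

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tickers(symbol TEXT PRIMARY KEY)")
assert upsert_ticker(conn, "AAPL") == 1
```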
@@ -444,6 +444,16 @@ def create_model(
     """
     provider = canonicalize_model_provider(provider)

+    # If provider is default OPENAI but model name looks like deepseek,
+    # check if we should switch to DASHSCOPE.
+    if provider == "OPENAI" and "deepseek" in model_name.lower() and os.getenv("DASHSCOPE_API_KEY"):
+        provider = "DASHSCOPE"
+
+    # Intelligent routing: if it's a DeepSeek model and we have DashScope credentials,
+    # prefer using DashScopeChatModel over OpenAIChatModel.
+    if provider == "DEEPSEEK" and os.getenv("DASHSCOPE_API_KEY"):
+        provider = "DASHSCOPE"
+
     model_class = PROVIDER_MODEL_MAP.get(provider)
     if model_class is None:
         raise ValueError(f"Unsupported provider: {provider}")
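The routing block above reduces to two ordered rewrites of `provider`. A standalone sketch of the same decision table (the environment variable name is taken from the diff):

```python
import os

def route_provider(provider: str, model_name: str) -> str:
    # DeepSeek models go through DashScope whenever DASHSCOPE_API_KEY is
    # configured, regardless of whether the default provider was OPENAI.
    has_dashscope = bool(os.getenv("DASHSCOPE_API_KEY"))
    if provider == "OPENAI" and "deepseek" in model_name.lower() and has_dashscope:
        return "DASHSCOPE"
    if provider == "DEEPSEEK" and has_dashscope:
        return "DASHSCOPE"
    return provider

os.environ["DASHSCOPE_API_KEY"] = "sk-demo"  # placeholder for the demo
print(route_provider("OPENAI", "deepseek-v3"))    # DASHSCOPE
print(route_provider("DEEPSEEK", "deepseek-r1"))  # DASHSCOPE
print(route_provider("OPENAI", "gpt-4o"))         # OPENAI
```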
@@ -1,25 +1,27 @@
 from __future__ import annotations

 from dataclasses import dataclass, field
-from datetime import datetime, UTC
+from datetime import datetime, timezone
 from typing import Any, Dict


 @dataclass
 class AgentRuntimeState:
     agent_id: str
+    display_name: str | None = None
     status: str = "idle"
     last_session: str | None = None
-    last_updated: datetime = field(default_factory=lambda: datetime.now(UTC))
+    last_updated: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

     def update(self, status: str, session_key: str | None = None) -> None:
         self.status = status
         self.last_session = session_key
-        self.last_updated = datetime.now(UTC)
+        self.last_updated = datetime.now(timezone.utc)

     def to_dict(self) -> Dict[str, Any]:
         return {
             "agent_id": self.agent_id,
+            "display_name": self.display_name,
             "status": self.status,
             "last_session": self.last_session,
             "last_updated": self.last_updated.isoformat(),
@@ -2,7 +2,7 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import json
|
import json
|
||||||
from datetime import datetime, UTC
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
@@ -93,7 +93,7 @@ class TradingRuntimeManager:

     def log_event(self, event: str, details: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
         entry = {
-            "timestamp": datetime.now(UTC).isoformat(),
+            "timestamp": datetime.now(timezone.utc).isoformat(),
             "event": event,
             "details": details or {},
             "session": self.current_session_key,
@@ -102,15 +102,25 @@ class TradingRuntimeManager:
         self._persist_snapshot()
         return entry

-    def register_agent(self, agent_id: str) -> AgentRuntimeState:
-        state = AgentRuntimeState(agent_id=agent_id)
+    def register_agent(
+        self,
+        agent_id: str,
+        display_name: Optional[str] = None,
+    ) -> AgentRuntimeState:
+        state = AgentRuntimeState(agent_id=agent_id, display_name=display_name)
         self.registry.register(agent_id, state)
         self._persist_snapshot()
         return state

+    def unregister_agent(self, agent_id: str) -> Optional[AgentRuntimeState]:
+        state = self.registry.unregister(agent_id)
+        if state is not None:
+            self._persist_snapshot()
+        return state
+
     def register_pending_approval(self, approval_id: str, payload: Dict[str, Any]) -> None:
         payload.setdefault("status", "pending")
-        payload.setdefault("created_at", datetime.now(UTC).isoformat())
+        payload.setdefault("created_at", datetime.now(timezone.utc).isoformat())
         self.pending_approvals[approval_id] = payload
         self._persist_snapshot()
@@ -139,7 +149,7 @@ class TradingRuntimeManager:
         if not entry:
             return
         entry["status"] = status
-        entry["resolved_at"] = datetime.now(UTC).isoformat()
+        entry["resolved_at"] = datetime.now(timezone.utc).isoformat()
         entry["resolved_by"] = resolved_by
         self._persist_snapshot()
@@ -13,6 +13,9 @@ class RuntimeRegistry:
     def get(self, agent_id: str) -> Optional["AgentRuntimeState"]:
         return self._states.get(agent_id)

+    def unregister(self, agent_id: str) -> Optional["AgentRuntimeState"]:
+        return self._states.pop(agent_id, None)
+
     def list_agents(self) -> list[str]:
         return list(self._states.keys())
@@ -148,8 +148,9 @@ class Gateway:
             self.handle_client,
             host,
             port,
-            ping_interval=30,
-            ping_timeout=60,
+            ping_interval=20,
+            ping_timeout=120,
+            max_size=10 * 1024 * 1024,  # 10MB
         )
         logger.info(f"WebSocket server ready: ws://{host}:{port}")
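For reference, the tuned keepalive settings plug into `websockets.serve` as shown below; the handler, host, and port are placeholders (requires websockets' asyncio server API, v10+):

```python
import asyncio
import websockets

async def handler(conn):
    # Echo handler as a stand-in for Gateway.handle_client.
    async for message in conn:
        await conn.send(message)

async def main():
    async with websockets.serve(
        handler,
        "127.0.0.1",
        8765,
        ping_interval=20,            # probe clients more often
        ping_timeout=120,            # but tolerate long pauses before dropping them
        max_size=10 * 1024 * 1024,   # accept frames up to 10MB
    ):
        await asyncio.Future()  # run until cancelled

asyncio.run(main())
```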
@@ -833,12 +834,18 @@ class Gateway:
         if not self.connected_clients:
             return

-        message_json = json.dumps(message, ensure_ascii=False, default=str)
+        # Offload potentially heavy JSON serialization to thread
+        message_json = await asyncio.to_thread(
+            json.dumps, message, ensure_ascii=False, default=str
+        )

         async with self.lock:
+            # Filter only active clients to minimize unnecessary send attempts
+            # In websockets v13+, we must check state.name == 'OPEN'
+            active_clients = [c for c in self.connected_clients if c.state.name == 'OPEN']
             tasks = [
                 self._send_to_client(client, message_json)
-                for client in self.connected_clients.copy()
+                for client in active_clients
             ]

         if tasks:
@@ -849,9 +856,14 @@ class Gateway:
         client: ServerConnection,
         message: str,
     ):
+        if client.state.name != 'OPEN':
+            async with self.lock:
+                self.connected_clients.discard(client)
+            return
+
         try:
             await client.send(message)
-        except websockets.ConnectionClosed:
+        except (websockets.ConnectionClosed, Exception):
             async with self.lock:
                 self.connected_clients.discard(client)
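Condensed, the broadcast path after these two hunks looks roughly like the sketch below: serialize off the event loop, snapshot only OPEN connections under the lock, then fan out the sends. `self` stands in for the Gateway instance and the attribute names follow the diff:

```python
import asyncio
import json

async def broadcast(self, message: dict) -> None:
    if not self.connected_clients:
        return
    # json.dumps of a large payload can stall the loop; run it in a worker thread.
    payload = await asyncio.to_thread(json.dumps, message, ensure_ascii=False, default=str)
    async with self.lock:
        targets = [c for c in self.connected_clients if c.state.name == "OPEN"]
    await asyncio.gather(*(self._send_to_client(c, payload) for c in targets))
```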
@@ -253,7 +253,8 @@ async def finalize_cycle(gateway: Any, date: str) -> None:

 async def get_market_caps(gateway: Any, tickers: list[str], date: str) -> dict[str, float]:
     market_caps: dict[str, float] = {}
-    for ticker in tickers:
+
+    async def _get_one(ticker: str):
         try:
             market_cap = None
             response = await gateway._call_trading_service(
@@ -263,12 +264,21 @@ async def get_market_caps(gateway: Any, tickers: list[str], date: str) -> dict[str, float]:
             if response is not None:
                 market_cap = response.get("market_cap")
             if market_cap is None:
-                payload = trading_domain.get_market_cap_payload(ticker=ticker, end_date=date)
+                payload = await asyncio.to_thread(
+                    trading_domain.get_market_cap_payload,
+                    ticker=ticker,
+                    end_date=date,
+                )
                 market_cap = payload.get("market_cap")
-            market_caps[ticker] = market_cap if market_cap else 1e9
+            return ticker, (market_cap if market_cap else 1e9)
         except Exception as exc:
             logger.warning("Failed to get market cap for %s, using default 1e9: %s", ticker, exc)
-            market_caps[ticker] = 1e9
+            return ticker, 1e9
+
+    tasks = [_get_one(ticker) for ticker in tickers]
+    results = await asyncio.gather(*tasks)
+    for ticker, mc in results:
+        market_caps[ticker] = mc
     return market_caps
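The per-ticker rewrite above is an instance of a reusable pattern: each coroutine resolves to a `(key, value)` pair and converts its own failure into a default, so one bad item cannot sink the whole `gather`. A generic sketch, where `fetch` is any async callable you supply:

```python
import asyncio

async def gather_with_default(keys, fetch, default):
    async def one(key):
        try:
            return key, await fetch(key)
        except Exception:
            return key, default  # isolate the failure to this key
    return dict(await asyncio.gather(*(one(k) for k in keys)))
```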
@@ -517,111 +517,129 @@ async def handle_get_stock_similar_days(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:


 async def handle_get_stock_technical_indicators(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
     ticker = normalize_symbol(data.get("ticker", ""))
     if not ticker:
         await websocket.send(json.dumps({
             "type": "stock_technical_indicators_loaded",
             "ticker": ticker,
             "indicators": None,
             "error": "ticker is required",
         }, ensure_ascii=False))
         return

     try:
         end_date = datetime.now()
-        start_date = end_date - timedelta(days=250)
+        # Reduced from 250 to 150 days to lower CPU/memory pressure while still supporting MA200 (approx 140 trading days)
+        start_date = end_date - timedelta(days=150)

         prices = None
         response = await gateway._call_trading_service(
             "get_prices",
             lambda client: client.get_prices(
                 ticker=ticker,
                 start_date=start_date.strftime("%Y-%m-%d"),
                 end_date=end_date.strftime("%Y-%m-%d"),
             ),
         )
         if response is not None:
             prices = response.prices

         if prices is None:
-            payload = trading_domain.get_prices_payload(
-                ticker=ticker,
-                start_date=start_date.strftime("%Y-%m-%d"),
-                end_date=end_date.strftime("%Y-%m-%d"),
-            )
-            prices = payload.get("prices") or []
+            # Offload domain logic to thread
+            payload = await asyncio.to_thread(
+                trading_domain.get_prices_payload,
+                ticker=ticker,
+                start_date=start_date.strftime("%Y-%m-%d"),
+                end_date=end_date.strftime("%Y-%m-%d"),
+            )
+            prices = payload.get("prices") or []

         if not prices or len(prices) < 20:
             await websocket.send(json.dumps({
                 "type": "stock_technical_indicators_loaded",
                 "ticker": ticker,
                 "indicators": None,
                 "error": "Insufficient price data",
             }, ensure_ascii=False))
             return

-        df = prices_to_df(prices)
-        signal = gateway._technical_analyzer.analyze(ticker, df)
-
-        df_sorted = df.sort_values("time").reset_index(drop=True)
-        df_sorted["returns"] = df_sorted["close"].pct_change()
-        vol_10 = float(df_sorted["returns"].tail(10).std() * (252**0.5) * 100) if len(df_sorted) >= 10 else None
-        vol_20 = float(df_sorted["returns"].tail(20).std() * (252**0.5) * 100) if len(df_sorted) >= 20 else None
-        vol_60 = float(df_sorted["returns"].tail(60).std() * (252**0.5) * 100) if len(df_sorted) >= 60 else None
-        ma_distance = {}
-        for ma_key in ["ma5", "ma10", "ma20", "ma50", "ma200"]:
-            ma_value = getattr(signal, ma_key, None)
-            ma_distance[ma_key] = ((signal.current_price - ma_value) / ma_value) * 100 if ma_value and ma_value > 0 else None
-
-        indicators = {
+        def _calc():
+            df = prices_to_df(prices)
+            signal = gateway._technical_analyzer.analyze(ticker, df)
+            df_sorted = df.sort_values("time").reset_index(drop=True)
+            df_sorted["returns"] = df_sorted["close"].pct_change()
+            v10 = float(df_sorted["returns"].tail(10).std() * (252**0.5) * 100) if len(df_sorted) >= 10 else None
+            v20 = float(df_sorted["returns"].tail(20).std() * (252**0.5) * 100) if len(df_sorted) >= 20 else None
+            v60 = float(df_sorted["returns"].tail(60).std() * (252**0.5) * 100) if len(df_sorted) >= 60 else None
+
+            ma_dist = {}
+            for ma_key in ["ma5", "ma10", "ma20", "ma50", "ma200"]:
+                ma_val = getattr(signal, ma_key, None)
+                ma_dist[ma_key] = ((signal.current_price - ma_val) / ma_val) * 100 if ma_val and ma_val > 0 else None
+
+            return {
                 "ticker": ticker,
                 "current_price": signal.current_price,
                 "ma": {
                     "ma5": signal.ma5,
                     "ma10": signal.ma10,
                     "ma20": signal.ma20,
                     "ma50": signal.ma50,
                     "ma200": signal.ma200,
-                    "distance": ma_distance,
+                    "distance": ma_dist,
                 },
                 "rsi": {
                     "rsi14": signal.rsi14,
                     "status": "oversold" if signal.rsi14 < 30 else "overbought" if signal.rsi14 > 70 else "neutral",
                 },
                 "macd": {
                     "macd": signal.macd,
                     "signal": signal.macd_signal,
                     "histogram": signal.macd - signal.macd_signal,
                 },
                 "bollinger": {
                     "upper": signal.bollinger_upper,
                     "mid": signal.bollinger_mid,
                     "lower": signal.bollinger_lower,
                 },
                 "volatility": {
-                    "vol_10d": vol_10,
-                    "vol_20d": vol_20,
-                    "vol_60d": vol_60,
+                    "vol_10d": v10,
+                    "vol_20d": v20,
+                    "vol_60d": v60,
                     "annualized": signal.annualized_volatility_pct,
                     "risk_level": signal.risk_level,
                 },
                 "trend": signal.trend,
                 "mean_reversion": signal.mean_reversion_signal,
             }

-        await websocket.send(json.dumps({
-            "type": "stock_technical_indicators_loaded",
-            "ticker": ticker,
-            "indicators": indicators,
-        }, ensure_ascii=False, default=str))
+        # Use a semaphore to prevent too many concurrent CPU-intensive calculations
+        # which can block the event loop heartbeats.
+        if not hasattr(gateway, "_calc_sem"):
+            gateway._calc_sem = asyncio.Semaphore(3)
+
+        async with gateway._calc_sem:
+            indicators = await asyncio.to_thread(_calc)
+
+        # Also offload JSON serialization to thread to avoid blocking main loop
+        msg = await asyncio.to_thread(json.dumps, {
+            "type": "stock_technical_indicators_loaded",
+            "ticker": ticker,
+            "indicators": indicators,
+        }, ensure_ascii=False, default=str)
+
+        if websocket.state.name == 'OPEN':
+            await websocket.send(msg)
+        else:
+            logger.warning("Websocket closed for %s, skipping indicator send", ticker)
     except Exception as exc:
         logger.exception("Error getting technical indicators for %s", ticker)
         await websocket.send(json.dumps({
             "type": "stock_technical_indicators_loaded",
             "ticker": ticker,
             "indicators": None,
             "error": str(exc),
         }, ensure_ascii=False))


 async def handle_run_stock_enrich(gateway: Any, websocket: Any, data: dict[str, Any]) -> None:
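Distilled, the throttling added in this hunk combines two ideas: `asyncio.to_thread` keeps the pandas work off the event loop, and a small semaphore caps how many such jobs run concurrently so ping/pong heartbeats keep flowing. A minimal sketch with illustrative names:

```python
import asyncio

_calc_sem = asyncio.Semaphore(3)  # at most three CPU-heavy jobs at once

async def bounded_compute(fn, *args, **kwargs):
    async with _calc_sem:
        return await asyncio.to_thread(fn, *args, **kwargs)
```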
@@ -7,6 +7,7 @@ Handles reading/writing dashboard JSON files and portfolio state
 import json
 import logging
 import os
+import time
 from datetime import datetime
 from pathlib import Path
 from typing import Any, Dict, List, Optional
@@ -950,11 +951,14 @@ class StorageService:

     def save_server_state(self, state: Dict[str, Any]):
         """
-        Save server state to file
-
-        Args:
-            state: Server state dictionary
+        Save server state to file with rate-limiting to avoid I/O storms.
         """
+        now = time.time()
+        # Ensure at least 2 seconds between physical disk writes
+        if hasattr(self, "_last_save_time") and (now - self._last_save_time) < 2.0:
+            return
+        self._last_save_time = now
+
         state_to_save = {
             **state,
             "last_saved": datetime.now().isoformat(),
@@ -970,14 +974,17 @@ class StorageService:
         if "trades" in state_to_save:
             state_to_save["trades"] = state_to_save["trades"][:100]

-        with open(self.server_state_file, "w", encoding="utf-8") as f:
-            json.dump(
-                state_to_save,
-                f,
-                ensure_ascii=False,
-                indent=2,
-                default=str,
-            )
+        try:
+            with open(self.server_state_file, "w", encoding="utf-8") as f:
+                # Removed indent=2 to minimize file size and serialization overhead
+                json.dump(
+                    state_to_save,
+                    f,
+                    ensure_ascii=False,
+                    default=str,
+                )
+        except Exception as e:
+            logger.error(f"Failed to save server state: {e}")

         logger.debug(f"Server state saved to: {self.server_state_file}")
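The same write-throttling idea, isolated into a tiny class (the name `ThrottledWriter` is hypothetical). Note the tradeoff the diff accepts: a save arriving inside the window is dropped, so the newest state only reaches disk once another save comes along later:

```python
import json
import time

class ThrottledWriter:
    def __init__(self, path: str, min_interval: float = 2.0):
        self.path = path
        self.min_interval = min_interval
        self._last_save = 0.0

    def save(self, state: dict) -> None:
        now = time.time()
        if now - self._last_save < self.min_interval:
            return  # skip this write; rely on a later call to persist fresh state
        self._last_save = now
        with open(self.path, "w", encoding="utf-8") as f:
            json.dump(state, f, ensure_ascii=False, default=str)
```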
@@ -23,15 +23,17 @@ version: 1.0.0
 ## 3) Decision procedure

 1. Aggregate and compare analyst signals; identify consensus and divergence.
-2. Map risk warnings to position caps and no-open conditions.
-3. Under cash and margin constraints, generate candidate actions and quantities for each ticker.
-4. Apply conservative arbitration to conflicting signals: reduce position size, raise trigger thresholds, or switch to `hold`.
-5. Record the final decision ticker by ticker and give a portfolio-level rationale.
+2. First judge whether the current team covers the expertise this round of tasks requires; if not, prioritize expanding the team over arbitrating directly.
+3. Map risk warnings to position caps and no-open conditions.
+4. Under cash and margin constraints, generate candidate actions and quantities for each ticker.
+5. Apply conservative arbitration to conflicting signals: reduce position size, raise trigger thresholds, add analysts, or switch to `hold`.
+6. Record the final decision ticker by ticker and give a portfolio-level rationale.

 ## 4) Tool call policy

 - You must use the decision tool to record each ticker's final `action/quantity`.
-- During discussion, if the current team's capabilities prove insufficient, you may use the team tools to dynamically create or remove analysts (then continue the discussion).
+- During discussion, if the current team's capabilities prove insufficient, the evidence chain breaks, or conflicting views cannot be adjudicated, you must first use the team tools to dynamically create or clone analysts (then continue the discussion).
+- If you have already judged that "more specialized analysis is needed" but have not called the dynamic team tools to fill the gap, you must not output a high-confidence final decision.
 - If the risk tools flag blocking items, follow the block first; never bypass it.
 - On tool-call failure: retry once; if it still fails, output a structured "unfinished decisions list" with suggestions for manual handling.
@@ -46,5 +48,6 @@ version: 1.0.0
 ## 6) Failure fallback

 - When analyst signals conflict materially with risk conclusions, default to a smaller position or `hold`.
+- When a task clearly exceeds the current team's capability boundary, expand the team first; only if expansion fails, fall back to `hold` or a conditional decision draft.
 - When constraint checks fail (insufficient cash/margin), automatically scale quantities down; never output unexecutable orders.
 - When the task requires a complete list, no ticker may be omitted; if no decision can be made, explicitly mark `hold` and explain why.
@@ -10,12 +10,15 @@ description: Integrate analyst views and risk feedback to form clear portfolio-level decisions
 ## Workflow

 1. Before acting, read the analyst conclusions and risk warnings.
-2. Assess the current portfolio, cash, and margin constraints.
-3. Use the decision tool to record one explicit decision per ticker.
-4. After all decisions are recorded, summarize the overall portfolio-level rationale.
+2. First judge whether the current team is sufficient for this round of tasks; if not, expand the team before continuing.
+3. Assess the current portfolio, cash, and margin constraints.
+4. Use the decision tool to record one explicit decision per ticker.
+5. After all decisions are recorded, summarize the overall portfolio-level rationale.

 ## Constraints

 - Position sizes must respect cash and margin limits.
 - When analyst confidence and risk signals disagree, prefer a smaller position.
+- When a task exceeds the current team's capability boundary, prefer using the dynamic team tools to create or clone analysts.
+- Once a coverage gap has been identified, do not skip the expansion step and jump straight to a high-confidence conclusion.
 - When the task requires a complete decision list, do not leave any ticker undecided.
@@ -305,71 +305,6 @@ def test_pipeline_create_runtime_analyst_uses_evo_agent_when_enabled(monkeypatch
     assert created.get("config_name") == "demo"


-def test_pipeline_create_runtime_analyst_uses_legacy_when_not_in_evo_ids(monkeypatch, tmp_path):
-    """Test that _create_runtime_analyst creates legacy AnalystAgent when not in EVO_AGENT_IDS."""
-    from backend.core import pipeline as pipeline_module
-
-    created = {}
-
-    class DummyEvoAgent:
-        name = "test_analyst"
-
-        def __init__(self, **kwargs):
-            created.update(kwargs)
-            self.toolkit = None
-
-    class DummyAnalystAgent:
-        name = "test_analyst"
-
-        def __init__(self, **kwargs):
-            created.update(kwargs)
-            self.toolkit = None
-
-    # EVO_AGENT_IDS does not include fundamentals_analyst
-    monkeypatch.setenv("EVO_AGENT_IDS", "technical_analyst")
-    monkeypatch.setattr(pipeline_module, "EvoAgent", DummyEvoAgent)
-    monkeypatch.setattr(pipeline_module, "AnalystAgent", DummyAnalystAgent)
-    monkeypatch.setattr(
-        pipeline_module,
-        "create_agent_toolkit",
-        lambda *args, **kwargs: "toolkit",
-    )
-    monkeypatch.setattr(
-        pipeline_module,
-        "get_agent_model",
-        lambda x: "model",
-    )
-    monkeypatch.setattr(
-        pipeline_module,
-        "get_agent_formatter",
-        lambda x: "formatter",
-    )
-
-    # Create a mock pipeline instance
-    class MockPM:
-        def __init__(self):
-            self.config = {"config_name": "demo"}
-
-    pipeline = pipeline_module.TradingPipeline(
-        analysts=[],
-        risk_manager=None,
-        portfolio_manager=MockPM(),
-    )
-
-    # Mock workspace_manager methods
-    monkeypatch.setattr(
-        pipeline_module.WorkspaceManager,
-        "ensure_agent_assets",
-        lambda *args, **kwargs: None,
-    )
-
-    result = pipeline._create_runtime_analyst("test_analyst", "fundamentals_analyst")
-
-    assert "Created runtime analyst" in result
-    # Should use legacy AnalystAgent
-    assert created.get("analyst_type") == "fundamentals_analyst"
-
-
 def test_main_resolve_evo_agent_ids_returns_all_by_default(monkeypatch):
     """Test that _resolve_evo_agent_ids returns all supported roles by default."""
     from backend.core import pipeline_runner as runner_module
@@ -427,11 +362,3 @@ def test_evo_agent_supports_long_term_memory(monkeypatch, tmp_path):
     assert created["long_term_memory"] is dummy_memory
-
-
-def test_evo_agent_legacy_mode(monkeypatch):
-    """Test that EVO_AGENT_IDS=legacy disables EvoAgent."""
-    from backend import main as main_module
-
-    monkeypatch.setenv("EVO_AGENT_IDS", "legacy")
-
-    resolved = main_module._resolve_evo_agent_ids()
-    assert resolved == set()
@@ -103,18 +103,28 @@ def _safe_float(value, default=0.0) -> float:


 def safe(func):
-    """Decorator to catch exceptions in tool functions."""
+    """Decorator to catch exceptions in both sync and async tool functions."""

-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        try:
-            return func(*args, **kwargs)
-        except Exception as e:
-            error_msg = f"Error in {func.__name__}: {str(e)}"
-            logger.error(f"{error_msg}\n{traceback.format_exc()}")
-            return _to_text_response(f"[ERROR] {error_msg}")
-
-    return wrapper
+    if asyncio.iscoroutinefunction(func):
+        @wraps(func)
+        async def async_wrapper(*args, **kwargs):
+            try:
+                return await func(*args, **kwargs)
+            except Exception as e:
+                error_msg = f"Error in {func.__name__}: {str(e)}"
+                logger.error(f"{error_msg}\n{traceback.format_exc()}")
+                return _to_text_response(f"[ERROR] {error_msg}")
+        return async_wrapper
+    else:
+        @wraps(func)
+        def sync_wrapper(*args, **kwargs):
+            try:
+                return func(*args, **kwargs)
+            except Exception as e:
+                error_msg = f"Error in {func.__name__}: {str(e)}"
+                logger.error(f"{error_msg}\n{traceback.format_exc()}")
+                return _to_text_response(f"[ERROR] {error_msg}")
+        return sync_wrapper


 def _fmt(val, fmt=".2f", suffix="") -> str:
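The point of the split wrapper is that one `@safe` line now does the right thing for both kinds of tools, because `asyncio.iscoroutinefunction` selects the wrapper once, at decoration time. A self-contained toy version that demonstrates the behavior:

```python
import asyncio
from functools import wraps

def safe(func):
    # Trimmed-down variant of the decorator in the hunk above.
    if asyncio.iscoroutinefunction(func):
        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except Exception as e:
                return f"[ERROR] {func.__name__}: {e}"
        return async_wrapper

    @wraps(func)
    def sync_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            return f"[ERROR] {func.__name__}: {e}"
    return sync_wrapper

@safe
def sync_tool():
    raise RuntimeError("boom")

@safe
async def async_tool():
    raise RuntimeError("boom")

print(sync_tool())                # [ERROR] sync_tool: boom
print(asyncio.run(async_tool()))  # [ERROR] async_tool: boom
```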
@@ -141,7 +151,7 @@ def _resolved_date(current_date: Optional[str]) -> str:


 @safe
-def analyze_efficiency_ratios(
+async def analyze_efficiency_ratios(
     tickers: Optional[List[str]] = None,
     current_date: Optional[str] = None,
 ) -> ToolResponse:
@@ -163,21 +173,26 @@ def analyze_efficiency_ratios(
     tickers = _parse_tickers(tickers)
     lines = [f"=== Efficiency Ratios Analysis ({current_date}) ===\n"]

-    for ticker in tickers:
-        metrics = get_financial_metrics(ticker=ticker, end_date=current_date)
-        if not metrics:
-            lines.append(f"{ticker}: No data available\n")
-            continue
-
-        m = metrics[0]
-        lines.append(f"{ticker}:")
-        lines.append(f"  Asset Turnover: {_fmt(m.asset_turnover)}")
-        lines.append(f"  Inventory Turnover: {_fmt(m.inventory_turnover)}")
-        lines.append(f"  Receivables Turnover: {_fmt(m.receivables_turnover)}")
-        lines.append(
-            f"  Working Capital Turnover: {_fmt(m.working_capital_turnover)}",
-        )
-        lines.append("")
+    async def _fetch_one(ticker):
+        try:
+            metrics = await asyncio.to_thread(get_financial_metrics, ticker=ticker, end_date=current_date)
+            if not metrics:
+                return f"{ticker}: No data available\n"
+
+            m = metrics[0]
+            ticker_lines = [
+                f"{ticker}:",
+                f"  Asset Turnover: {_fmt(m.asset_turnover)}",
+                f"  Inventory Turnover: {_fmt(m.inventory_turnover)}",
+                f"  Receivables Turnover: {_fmt(m.receivables_turnover)}",
+                f"  Working Capital Turnover: {_fmt(m.working_capital_turnover)}\n",
+            ]
+            return "\n".join(ticker_lines)
+        except Exception as e:
+            return f"{ticker}: Error - {str(e)}\n"
+
+    results = await asyncio.gather(*[_fetch_one(t) for t in tickers])
+    lines.extend(results)

     return _to_text_response("\n".join(lines))
@@ -310,7 +325,7 @@ def analyze_financial_health(


 @safe
-def analyze_valuation_ratios(
+async def analyze_valuation_ratios(
     tickers: Optional[List[str]] = None,
     current_date: Optional[str] = None,
 ) -> ToolResponse:
@@ -332,24 +347,31 @@ def analyze_valuation_ratios(
     tickers = _parse_tickers(tickers)
     lines = [f"=== Valuation Ratios Analysis ({current_date}) ===\n"]

-    for ticker in tickers:
-        metrics = get_financial_metrics(ticker=ticker, end_date=current_date)
-        if not metrics:
-            lines.append(f"{ticker}: No data available\n")
-            continue
-
-        m = metrics[0]
-        lines.append(f"{ticker}:")
-        lines.append(f"  P/E Ratio: {_fmt(m.price_to_earnings_ratio)}")
-        lines.append(f"  P/B Ratio: {_fmt(m.price_to_book_ratio)}")
-        lines.append(f"  P/S Ratio: {_fmt(m.price_to_sales_ratio)}")
-        lines.append("")
+    async def _fetch_one(ticker):
+        try:
+            metrics = await asyncio.to_thread(get_financial_metrics, ticker=ticker, end_date=current_date)
+            if not metrics:
+                return f"{ticker}: No data available\n"
+
+            m = metrics[0]
+            ticker_lines = [
+                f"{ticker}:",
+                f"  P/E Ratio: {_fmt(m.price_to_earnings_ratio)}",
+                f"  P/B Ratio: {_fmt(m.price_to_book_ratio)}",
+                f"  P/S Ratio: {_fmt(m.price_to_sales_ratio)}\n",
+            ]
+            return "\n".join(ticker_lines)
+        except Exception as e:
+            return f"{ticker}: Error - {str(e)}\n"
+
+    results = await asyncio.gather(*[_fetch_one(t) for t in tickers])
+    lines.extend(results)

     return _to_text_response("\n".join(lines))


 @safe
-def get_financial_metrics_tool(
+async def get_financial_metrics_tool(
     tickers: Optional[List[str]] = None,
     current_date: Optional[str] = None,
     period: str = "ttm",
@@ -374,35 +396,35 @@ def get_financial_metrics_tool(
         f"=== Comprehensive Financial Metrics ({current_date}, {period}) ===\n",
     ]

-    for ticker in tickers:
-        metrics = get_financial_metrics(
-            ticker=ticker,
-            end_date=current_date,
-            period=period,
-        )
-        if not metrics:
-            lines.append(f"{ticker}: No data available\n")
-            continue
-
-        m = metrics[0]
-        lines.append(f"{ticker}:")
-        lines.append(f"  Market Cap: ${_fmt(m.market_cap, ',.0f')}")
-        lines.append(
-            f"  P/E: {_fmt(m.price_to_earnings_ratio)} | P/B: {_fmt(m.price_to_book_ratio)} | P/S: {_fmt(m.price_to_sales_ratio)}",
-        )
-        lines.append(
-            f"  ROE: {_fmt(m.return_on_equity, '.1%')} | Net Margin: {_fmt(m.net_margin, '.1%')}",
-        )
-        lines.append(
-            f"  Revenue Growth: {_fmt(m.revenue_growth, '.1%')} | Earnings Growth: {_fmt(m.earnings_growth, '.1%')}",
-        )
-        lines.append(
-            f"  Current Ratio: {_fmt(m.current_ratio)} | D/E: {_fmt(m.debt_to_equity)}",
-        )
-        lines.append(
-            f"  EPS: ${_fmt(m.earnings_per_share)} | FCF/Share: ${_fmt(m.free_cash_flow_per_share)}",
-        )
-        lines.append("")
+    async def _fetch_one(ticker):
+        try:
+            # Offload synchronous data fetching to thread to keep loop snappy
+            metrics = await asyncio.to_thread(
+                get_financial_metrics,
+                ticker=ticker,
+                end_date=current_date,
+                period=period,
+            )
+            if not metrics:
+                return f"{ticker}: No data available\n"
+
+            m = metrics[0]
+            ticker_lines = [
+                f"{ticker}:",
+                f"  Market Cap: ${_fmt(m.market_cap, ',.0f')}",
+                f"  P/E: {_fmt(m.price_to_earnings_ratio)} | P/B: {_fmt(m.price_to_book_ratio)} | P/S: {_fmt(m.price_to_sales_ratio)}",
+                f"  ROE: {_fmt(m.return_on_equity, '.1%')} | Net Margin: {_fmt(m.net_margin, '.1%')}",
+                f"  Revenue Growth: {_fmt(m.revenue_growth, '.1%')} | Earnings Growth: {_fmt(m.earnings_growth, '.1%')}",
+                f"  Current Ratio: {_fmt(m.current_ratio)} | D/E: {_fmt(m.debt_to_equity)}",
+                f"  EPS: ${_fmt(m.earnings_per_share)} | FCF/Share: ${_fmt(m.free_cash_flow_per_share)}\n",
+            ]
+            return "\n".join(ticker_lines)
+        except Exception as e:
+            return f"{ticker}: Error fetching data - {str(e)}\n"
+
+    # Parallelize data retrieval for all tickers
+    results = await asyncio.gather(*[_fetch_one(t) for t in tickers])
+    lines.extend(results)

     return _to_text_response("\n".join(lines))
@@ -13,8 +13,11 @@ as described in the Dynamic Team Architecture.
 """
 from __future__ import annotations

+import json
 from typing import Any, Dict, List, Optional, Callable
-from dataclasses import asdict
+
+from agentscope.message import TextBlock
+from agentscope.tool import ToolResponse

 from backend.agents.dynamic_team_types import (
     AnalystPersona,
@@ -22,7 +25,7 @@ from backend.agents.dynamic_team_types import (
     CreateAnalystResult,
     AnalystTypeInfo,
 )
-from backend.config.constants import ANALYST_TYPES
+from backend.config.constants import ANALYST_TYPES, AGENT_CONFIG


 # Type alias for callbacks set by pipeline
@@ -30,6 +33,14 @@ CreateAnalystCallback = Callable[[str, str, Optional[AnalystConfig]], str]
 RemoveAnalystCallback = Callable[[str], str]


+def _to_tool_response(payload: Any) -> ToolResponse:
+    if isinstance(payload, str):
+        text = payload
+    else:
+        text = json.dumps(payload, ensure_ascii=False, indent=2, default=str)
+    return ToolResponse(content=[TextBlock(type="text", text=text)])
+
+
 class DynamicTeamController:
     """Controller for dynamic analyst team management.
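With `_to_tool_response` in place, every wrapper below can return plain dicts or strings and still satisfy the toolkit's `ToolResponse` contract. A usage sketch built from the same `agentscope` types the hunk imports:

```python
import json
from agentscope.message import TextBlock
from agentscope.tool import ToolResponse

def to_tool_response(payload) -> ToolResponse:
    # Strings pass through; anything else is pretty-printed as JSON.
    text = payload if isinstance(payload, str) else json.dumps(
        payload, ensure_ascii=False, indent=2, default=str
    )
    return ToolResponse(content=[TextBlock(type="text", text=text)])

# The agent_id value here is purely illustrative.
resp = to_tool_response({"success": True, "agent_id": "macro_analyst"})
```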
@@ -296,6 +307,23 @@ class DynamicTeamController:
         Dict with analyst configuration and status
         """
         config = self._instance_configs.get(agent_id)
+        current_analysts = self._get_analysts_callback() if self._get_analysts_callback else []
+        analyst_map = {
+            (getattr(agent, "name", None) or getattr(agent, "agent_id", None)): agent
+            for agent in current_analysts
+        }
+        if agent_id in analyst_map and not config:
+            builtin_meta = AGENT_CONFIG.get(agent_id, {})
+            return {
+                "found": True,
+                "agent_id": agent_id,
+                "name": builtin_meta.get("name") or agent_id,
+                "type": agent_id,
+                "is_custom": False,
+                "is_clone": False,
+                "is_builtin": True,
+                "message": f"Built-in analyst '{agent_id}' is active",
+            }
         if not config:
             return {
                 "found": False,
@@ -310,6 +338,7 @@ class DynamicTeamController:
             "is_custom": config.persona is not None,
             "is_clone": config.parent_id is not None,
             "parent_id": config.parent_id,
+            "is_builtin": False,
         }

     def register_analyst_type(
@@ -372,13 +401,26 @@ class DynamicTeamController:
             Dict with team composition information
         """
         analysts = []
-        for agent_id, config in self._instance_configs.items():
+        current_analysts = self._get_analysts_callback() if self._get_analysts_callback else []
+        instance_configs = self._instance_configs
+
+        for agent in current_analysts:
+            agent_id = getattr(agent, "name", None) or getattr(agent, "agent_id", None)
+            if not agent_id:
+                continue
+            config = instance_configs.get(agent_id)
+            builtin_meta = AGENT_CONFIG.get(agent_id, {})
             analysts.append({
                 "agent_id": agent_id,
-                "name": config.persona.name if config.persona else agent_id,
-                "type": config.analyst_type,
-                "is_custom": config.persona is not None,
-                "is_clone": config.parent_id is not None,
+                "name": (
+                    config.persona.name
+                    if config and config.persona and config.persona.name
+                    else builtin_meta.get("name") or agent_id
+                ),
+                "type": config.analyst_type if config else agent_id,
+                "is_custom": bool(config and config.persona is not None),
+                "is_clone": bool(config and config.parent_id is not None),
+                "is_builtin": config is None,
             })

         return {
@@ -418,91 +460,95 @@ def get_controller() -> Optional[DynamicTeamController]:
 def create_analyst(
     agent_id: str,
     analyst_type: str,
-    name: Optional[str] = None,
-    focus: Optional[str] = None,
-    description: Optional[str] = None,
-    soul_md: Optional[str] = None,
-    agents_md: Optional[str] = None,
-    model_name: Optional[str] = None,
-) -> Dict[str, Any]:
+    name: str = "",
+    focus: str = "",
+    description: str = "",
+    soul_md: str = "",
+    agents_md: str = "",
+    model_name: str = "",
+) -> ToolResponse:
     """Tool wrapper for create_analyst.

     Note: focus parameter accepts comma-separated string for tool compatibility.
     """
     controller = get_controller()
     if not controller:
-        return {"success": False, "error": "Dynamic team controller not available"}
+        return _to_tool_response({"success": False, "error": "Dynamic team controller not available"})

     focus_list = [f.strip() for f in focus.split(",")] if focus else None
-    return controller.create_analyst(
-        agent_id=agent_id,
-        analyst_type=analyst_type,
-        name=name,
-        focus=focus_list,
-        description=description,
-        soul_md=soul_md,
-        agents_md=agents_md,
-        model_name=model_name,
-    )
+    return _to_tool_response(
+        controller.create_analyst(
+            agent_id=agent_id,
+            analyst_type=analyst_type,
+            name=name,
+            focus=focus_list,
+            description=description,
+            soul_md=soul_md,
+            agents_md=agents_md,
+            model_name=model_name,
+        )
+    )


 def clone_analyst(
     source_id: str,
     new_id: str,
-    name: Optional[str] = None,
-    focus_additions: Optional[str] = None,
-    description_override: Optional[str] = None,
-    model_name: Optional[str] = None,
-) -> Dict[str, Any]:
+    name: str = "",
+    focus_additions: str = "",
+    description_override: str = "",
+    model_name: str = "",
+) -> ToolResponse:
     """Tool wrapper for clone_analyst.

     Note: focus_additions accepts comma-separated string.
     """
     controller = get_controller()
     if not controller:
-        return {"success": False, "error": "Dynamic team controller not available"}
+        return _to_tool_response({"success": False, "error": "Dynamic team controller not available"})

     additions_list = [f.strip() for f in focus_additions.split(",")] if focus_additions else None
-    return controller.clone_analyst(
-        source_id=source_id,
-        new_id=new_id,
-        name=name,
-        focus_additions=additions_list,
-        description_override=description_override,
-        model_name=model_name,
-    )
+    return _to_tool_response(
+        controller.clone_analyst(
+            source_id=source_id,
+            new_id=new_id,
+            name=name,
+            focus_additions=additions_list,
+            description_override=description_override,
+            model_name=model_name,
+        )
+    )


-def remove_analyst(agent_id: str) -> Dict[str, Any]:
+def remove_analyst(agent_id: str) -> ToolResponse:
     """Tool wrapper for remove_analyst."""
     controller = get_controller()
     if not controller:
-        return {"success": False, "error": "Dynamic team controller not available"}
-    return controller.remove_analyst(agent_id)
+        return _to_tool_response({"success": False, "error": "Dynamic team controller not available"})
+    return _to_tool_response(controller.remove_analyst(agent_id))


-def list_analyst_types() -> List[Dict[str, Any]]:
+def list_analyst_types() -> ToolResponse:
     """Tool wrapper for list_analyst_types."""
     controller = get_controller()
     if not controller:
-        return []
-    return controller.list_analyst_types()
+        return _to_tool_response([])
+    return _to_tool_response(controller.list_analyst_types())


-def get_analyst_info(agent_id: str) -> Dict[str, Any]:
+def get_analyst_info(agent_id: str) -> ToolResponse:
     """Tool wrapper for get_analyst_info."""
     controller = get_controller()
     if not controller:
-        return {"found": False, "error": "Controller not available"}
-    return controller.get_analyst_info(agent_id)
+        return _to_tool_response({"found": False, "error": "Controller not available"})
+    return _to_tool_response(controller.get_analyst_info(agent_id))


-def get_team_summary() -> Dict[str, Any]:
+def get_team_summary() -> ToolResponse:
     """Tool wrapper for get_team_summary."""
     controller = get_controller()
     if not controller:
-        return {"error": "Controller not available"}
-    return controller.get_team_summary()
+        return _to_tool_response({"error": "Controller not available"})
+    return _to_tool_response(controller.get_team_summary())


 __all__ = [
@@ -19,7 +19,6 @@
 import json
 import logging
 import os
-import warnings
 from abc import ABC, abstractmethod
 from typing import Any
@@ -71,7 +70,6 @@ class NoSandboxBackend(SandboxBackend):

     def __init__(self):
         self._module_cache = {}
-        self._warning_shown = False

     def _get_script_name(self, function_name: str) -> str:
         """
@@ -96,19 +94,6 @@ class NoSandboxBackend(SandboxBackend):
     ) -> dict:
         """Import the module directly and execute the function"""
-
-        # Show a security warning on first use
-        if not self._warning_shown:
-            warnings.warn(
-                "\n" + "=" * 60 + "\n"
-                "⚠️ [Security warning] Skills are running in no-sandbox mode (SKILL_SANDBOX_MODE=none)\n"
-                "   Skill scripts execute directly in the current process, with no isolation.\n"
-                "   Recommendation: set SKILL_SANDBOX_MODE=docker in production\n"
-                "=" * 60,
-                RuntimeWarning,
-                stacklevel=2,
-            )
-            self._warning_shown = True
-
         logger.debug(f"[NoSandbox] Executing skill: {skill_name}.{function_name}")

         try:
@@ -345,13 +330,13 @@ class SkillSandbox:
         self._backend = self._create_backend()
         self._initialized = True

-        logger.info(f"SkillSandbox initialized, mode: {self.mode}")
+        logger.debug(f"SkillSandbox initialized, mode: {self.mode}")

     def _create_backend(self) -> SandboxBackend:
         """Create the matching backend for the configured mode"""

         if self.mode == "none":
-            logger.info("Using no-sandbox mode (direct execution)")
+            logger.debug("Using no-sandbox mode (direct execution)")
             return NoSandboxBackend()

         elif self.mode == "docker":
@@ -83,7 +83,7 @@ Before using the production scripts, ensure the runtime environment has:
 - a usable Python environment
 - backend dependencies installed from the checked-in Python package metadata in `pyproject.toml`
 - the package installed with `pip install -e .` or `uv pip install -e .`
-- frontend dependencies installed with `npm ci`
+- frontend dependencies installed with `npm install`
 - repo dependencies installed
 - required market/model API keys
 - any desired `TICKERS` override
@@ -94,7 +94,7 @@ Recommended production install sequence:
 python3 -m venv .venv
 source .venv/bin/activate
 pip install -e .
-cd frontend && npm ci && npm run build && cd ..
+cd frontend && npm install && npm run build && cd ..
 ```

 ## Skill Sandbox Configuration
deploy/install-production.sh (new executable file, 656 lines)
@@ -0,0 +1,656 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
+
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+CYAN='\033[0;36m'
+NC='\033[0m'
+
+NON_INTERACTIVE=false
+AUTO_INSTALL_DEPS=""
+AUTO_INSTALL_SYSTEMD=""
+AUTO_START_SYSTEMD=""
+AUTO_INSTALL_NGINX=""
+AUTO_RELOAD_NGINX=""
+AUTO_USE_TLS=""
+AUTO_USE_DOCKER=""
+
+log() {
+    echo -e "${GREEN}[bigtime]${NC} $*"
+}
+
+warn() {
+    echo -e "${YELLOW}[bigtime]${NC} $*"
+}
+
+fail() {
+    echo -e "${RED}[bigtime]${NC} $*" >&2
+    exit 1
+}
+
+ask() {
+    local prompt="$1"
+    local default="${2:-}"
+    if ${NON_INTERACTIVE}; then
+        printf '%s' "${default}"
+        return
+    fi
+    local value
+    if [[ -n "${default}" ]]; then
+        read -r -p "${prompt} [${default}]: " value
+        printf '%s' "${value:-$default}"
+    else
+        read -r -p "${prompt}: " value
+        printf '%s' "${value}"
+    fi
+}
+
+ask_required() {
+    local prompt="$1"
+    local default="${2:-}"
+    local value=""
+    while [[ -z "${value}" ]]; do
+        value="$(ask "${prompt}" "${default}")"
+        if [[ -z "${value}" ]]; then
+            warn "This value cannot be empty; please enter it again."
+        fi
+    done
+    printf '%s' "${value}"
+}
+
+validate_domain_like() {
+    local value="$1"
+    [[ -z "${value}" ]] && return 1
+    [[ "${value}" =~ ^[A-Za-z0-9.-]+$ ]]
+}
+
+validate_file_parent_exists_or_rootable() {
+    local value="$1"
+    local parent
+    parent="$(dirname "${value}")"
+    [[ -d "${parent}" ]] || [[ "${parent}" == "/etc/bigtime" ]] || [[ "${parent}" == "/etc/nginx/conf.d" ]]
+}
+
+validate_numeric() {
+    local value="$1"
+    [[ "${value}" =~ ^[0-9]+([.][0-9]+)?$ ]]
+}
+
+confirm() {
+    local prompt="$1"
+    local default="${2:-Y}"
+    local override="${3:-}"
+    if [[ -n "${override}" ]]; then
+        [[ "${override}" =~ ^[Yy]([Ee][Ss])?$|^true$|^1$ ]]
+        return
+    fi
+    if ${NON_INTERACTIVE}; then
+        [[ "${default}" == "Y" ]]
+        return
+    fi
+    local suffix="[Y/n]"
+    [[ "${default}" == "N" ]] && suffix="[y/N]"
+    local value
+    read -r -p "${prompt} ${suffix}: " value
+    value="${value:-$default}"
+    [[ "${value}" =~ ^[Yy]$ ]]
+}
+
+command_exists() {
+    command -v "$1" >/dev/null 2>&1
+}
+
+detect_pkg_manager() {
+    if command_exists apt-get; then
+        echo "apt"
+        return
+    fi
+    if command_exists dnf; then
+        echo "dnf"
+        return
+    fi
+    if command_exists yum; then
+        echo "yum"
+        return
+    fi
+    echo ""
+}
+
+install_packages() {
+    local pkg_manager="$1"
+    case "${pkg_manager}" in
+        apt)
+            sudo apt-get update
+            sudo apt-get install -y python3 python3-venv python3-pip nginx curl git build-essential nodejs npm
+            ;;
+        dnf)
+            sudo dnf install -y python3 python3-pip nginx curl git gcc-c++ make nodejs npm
+            ;;
+        yum)
+            sudo yum install -y python3 python3-pip nginx curl git gcc-c++ make nodejs npm
+            ;;
+        *)
+            warn "Unrecognized package manager; skipping dependency install. Please install python3, venv, pip, nginx, node, and npm manually."
+            ;;
+    esac
+}
+
+render_systemd_unit() {
+    local service_name="$1"
+    local app_module="$2"
+    local port="$3"
+    local workers="$4"
+    local memory_max="$5"
+    local unit_path="$6"
+
+    sudo tee "${unit_path}" >/dev/null <<EOF
+[Unit]
+Description=BigTime ${service_name}
+After=network.target
+
+[Service]
+Type=simple
+User=${SERVICE_USER}
+Group=${SERVICE_GROUP}
+WorkingDirectory=${APP_DIR}
+EnvironmentFile=${ENV_FILE}
+ExecStart=${PYTHON_BIN} -m uvicorn ${app_module} --host 127.0.0.1 --port ${port} --workers ${workers} --log-level warning --no-access-log
+Restart=always
+RestartSec=3
+TimeoutStopSec=30
+KillMode=mixed
+NoNewPrivileges=true
+PrivateTmp=true
+ProtectSystem=full
+ProtectHome=false
+LimitNOFILE=65535
+TasksMax=4096
+MemoryMax=${memory_max}
+
+[Install]
+WantedBy=multi-user.target
+EOF
+}
+
+render_nginx_conf() {
+    local target="$1"
+    local use_tls="$2"
+    if [[ "${use_tls}" == "yes" ]]; then
+        sudo tee "${target}" >/dev/null <<EOF
+server {
+    listen 80;
+    server_name ${DOMAIN};
+
+    root ${APP_DIR}/frontend/dist;
+
+    location /.well-known/acme-challenge/ {
+        allow all;
+    }
+
+    location / {
+        return 301 https://\$host\$request_uri;
+    }
+}
+
+server {
+    listen 443 ssl http2;
+    server_name ${DOMAIN};
+
+    root ${APP_DIR}/frontend/dist;
+    index index.html;
+
+    ssl_certificate ${SSL_CERT_PATH};
+    ssl_certificate_key ${SSL_KEY_PATH};
+    include /etc/letsencrypt/options-ssl-nginx.conf;
+    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
+
+    location /ws {
+        proxy_pass http://127.0.0.1:8765;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade \$http_upgrade;
+        proxy_set_header Connection "upgrade";
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/runtime/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/dynamic-team/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/trading/ {
+        proxy_pass http://127.0.0.1:8001;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/news/ {
+        proxy_pass http://127.0.0.1:8002;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/ {
+        proxy_pass http://127.0.0.1:8000;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location / {
+        try_files \$uri \$uri/ /index.html;
+    }
+}
+EOF
+    else
+        sudo tee "${target}" >/dev/null <<EOF
+server {
+    listen 80;
+    server_name ${DOMAIN};
+
+    root ${APP_DIR}/frontend/dist;
+    index index.html;
+
+    location /.well-known/acme-challenge/ {
+        allow all;
+    }
+
+    location /ws {
+        proxy_pass http://127.0.0.1:8765;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade \$http_upgrade;
+        proxy_set_header Connection "upgrade";
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/runtime/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/dynamic-team/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/trading/ {
+        proxy_pass http://127.0.0.1:8001;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/news/ {
+        proxy_pass http://127.0.0.1:8002;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/ {
+        proxy_pass http://127.0.0.1:8000;
+        proxy_http_version 1.1;
+        proxy_set_header Host \$host;
+        proxy_set_header X-Real-IP \$remote_addr;
+        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto \$scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location / {
+        try_files \$uri \$uri/ /index.html;
+    }
+}
+EOF
+    fi
+}
+
+write_env_file() {
+    sudo mkdir -p "$(dirname "${ENV_FILE}")"
+    sudo tee "${ENV_FILE}" >/dev/null <<EOF
+AGENT_SERVICE_URL=http://127.0.0.1:8000
+TRADING_SERVICE_URL=http://127.0.0.1:8001
+NEWS_SERVICE_URL=http://127.0.0.1:8002
+RUNTIME_SERVICE_URL=http://127.0.0.1:8003
+
+TICKERS=${TICKERS}
+FIN_DATA_SOURCE=${FIN_DATA_SOURCE}
+FINANCIAL_DATASETS_API_KEY=${FINANCIAL_DATASETS_API_KEY}
+FINNHUB_API_KEY=${FINNHUB_API_KEY}
+POLYGON_API_KEY=${POLYGON_API_KEY}
+OPENAI_API_KEY=${OPENAI_API_KEY}
+OPENAI_BASE_URL=${OPENAI_BASE_URL}
+DASHSCOPE_API_KEY=${DASHSCOPE_API_KEY}
+MODEL_NAME=${MODEL_NAME}
+MEMORY_API_KEY=${MEMORY_API_KEY}
+SKILL_SANDBOX_MODE=${SKILL_SANDBOX_MODE}
+MAX_COMM_CYCLES=${MAX_COMM_CYCLES}
+MARGIN_REQUIREMENT=${MARGIN_REQUIREMENT}
+EOF
+}
+
+usage() {
+    cat <<EOF
+Usage:
+  ./deploy/install-production.sh [options]
+
+Options:
+  --non-interactive        Run with defaults / env overrides only
+  --app-dir PATH           Application directory
+  --service-user USER      systemd service user
+  --service-group GROUP    systemd service group
+  --domain DOMAIN          Public domain
+  --env-file PATH          Environment file path
+  --python-bin PATH        Python executable path
+  --tickers CSV            Default tickers
+  --fin-data-source NAME   finnhub/yfinance/financial_datasets
+  --model-name NAME        Default model name
+  --max-comm-cycles N      Conference rounds
|
||||||
|
--margin-requirement NUM Margin requirement
|
||||||
|
--use-docker-sandbox Set SKILL_SANDBOX_MODE=docker
|
||||||
|
--no-docker-sandbox Set SKILL_SANDBOX_MODE=none
|
||||||
|
--with-tls Generate HTTPS nginx config
|
||||||
|
--without-tls Generate HTTP nginx config
|
||||||
|
--install-deps Auto install dependencies
|
||||||
|
--skip-install-deps Skip dependency installation
|
||||||
|
--install-systemd Install systemd units
|
||||||
|
--skip-install-systemd Skip systemd unit installation
|
||||||
|
--start-systemd Enable/start services
|
||||||
|
--skip-start-systemd Do not start services
|
||||||
|
--install-nginx Install nginx config
|
||||||
|
--skip-install-nginx Skip nginx config installation
|
||||||
|
--reload-nginx Run nginx -t and reload
|
||||||
|
--skip-reload-nginx Skip nginx reload
|
||||||
|
--ssl-cert-path PATH TLS certificate path
|
||||||
|
--ssl-key-path PATH TLS key path
|
||||||
|
--help Show this help
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case "$1" in
|
||||||
|
--non-interactive) NON_INTERACTIVE=true ;;
|
||||||
|
--app-dir) APP_DIR="${2:?missing value}"; shift ;;
|
||||||
|
--service-user) SERVICE_USER="${2:?missing value}"; shift ;;
|
||||||
|
--service-group) SERVICE_GROUP="${2:?missing value}"; shift ;;
|
||||||
|
--domain) DOMAIN="${2:?missing value}"; shift ;;
|
||||||
|
--env-file) ENV_FILE="${2:?missing value}"; shift ;;
|
||||||
|
--python-bin) PYTHON_BIN="${2:?missing value}"; shift ;;
|
||||||
|
--tickers) TICKERS="${2:?missing value}"; shift ;;
|
||||||
|
--fin-data-source) FIN_DATA_SOURCE="${2:?missing value}"; shift ;;
|
||||||
|
--model-name) MODEL_NAME="${2:?missing value}"; shift ;;
|
||||||
|
--max-comm-cycles) MAX_COMM_CYCLES="${2:?missing value}"; shift ;;
|
||||||
|
--margin-requirement) MARGIN_REQUIREMENT="${2:?missing value}"; shift ;;
|
||||||
|
--use-docker-sandbox) AUTO_USE_DOCKER="Y" ;;
|
||||||
|
--no-docker-sandbox) AUTO_USE_DOCKER="N" ;;
|
||||||
|
--with-tls) AUTO_USE_TLS="Y" ;;
|
||||||
|
--without-tls) AUTO_USE_TLS="N" ;;
|
||||||
|
--install-deps) AUTO_INSTALL_DEPS="Y" ;;
|
||||||
|
--skip-install-deps) AUTO_INSTALL_DEPS="N" ;;
|
||||||
|
--install-systemd) AUTO_INSTALL_SYSTEMD="Y" ;;
|
||||||
|
--skip-install-systemd) AUTO_INSTALL_SYSTEMD="N" ;;
|
||||||
|
--start-systemd) AUTO_START_SYSTEMD="Y" ;;
|
||||||
|
--skip-start-systemd) AUTO_START_SYSTEMD="N" ;;
|
||||||
|
--install-nginx) AUTO_INSTALL_NGINX="Y" ;;
|
||||||
|
--skip-install-nginx) AUTO_INSTALL_NGINX="N" ;;
|
||||||
|
--reload-nginx) AUTO_RELOAD_NGINX="Y" ;;
|
||||||
|
--skip-reload-nginx) AUTO_RELOAD_NGINX="N" ;;
|
||||||
|
--ssl-cert-path) SSL_CERT_PATH="${2:?missing value}"; shift ;;
|
||||||
|
--ssl-key-path) SSL_KEY_PATH="${2:?missing value}"; shift ;;
|
||||||
|
--help) usage; exit 0 ;;
|
||||||
|
*) fail "Unknown option: $1" ;;
|
||||||
|
esac
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
|
main() {
    echo -e "${CYAN}BigTime Production Deployment Wizard${NC}"
    echo ""
    echo -e "${YELLOW}Note:${NC} This script is meant for deploying the current project starting from a clean machine."
    echo -e "${YELLOW}Recommended default:${NC} split services + systemd + nginx + static frontend."
    echo ""

    if confirm "Try to auto-install base dependencies (python3/nginx/node, etc.)?" "Y" "${AUTO_INSTALL_DEPS}"; then
        PKG_MANAGER="$(detect_pkg_manager)"
        install_packages "${PKG_MANAGER}"
    fi

    echo -e "${CYAN}Basic configuration${NC}"
    APP_DIR="${APP_DIR:-$(ask_required 'Application deploy directory (repository root; absolute path recommended)' "${REPO_ROOT}")}"
    [[ -d "${APP_DIR}" ]] || fail "Application directory does not exist: ${APP_DIR}"

    SERVICE_USER="${SERVICE_USER:-$(ask_required 'systemd service user' "$(id -un)")}"
    id "${SERVICE_USER}" >/dev/null 2>&1 || warn "User ${SERVICE_USER} does not exist yet; double-check the systemd configuration later."

    SERVICE_GROUP="${SERVICE_GROUP:-$(ask_required 'systemd service group' "$(id -gn)")}"

    # Try to detect the public IP automatically as the default domain value
    local detected_ip=""
    if [[ -z "${DOMAIN:-}" ]]; then
        log "Trying to detect the public IP automatically..."
        detected_ip=$(curl -s --connect-timeout 5 https://ifconfig.me || curl -s --connect-timeout 5 https://api.ipify.org || echo "")
        if [[ -n "${detected_ip}" ]]; then
            log "Detected public IP: ${detected_ip}"
        fi
    fi

    DOMAIN="${DOMAIN:-$(ask_required 'Deployment domain (an IP or localhost is fine)' "${detected_ip:-localhost}")}"
    validate_domain_like "${DOMAIN}" || warn "The domain/IP format looks non-standard; please double-check: ${DOMAIN}"

    ENV_FILE="${ENV_FILE:-$(ask_required 'Environment file path' '/etc/bigtime/bigtime.env')}"
    validate_file_parent_exists_or_rootable "${ENV_FILE}" || warn "Parent directory of the env file does not exist yet; the script will try to create it: $(dirname "${ENV_FILE}")"

    PYTHON_BIN="${PYTHON_BIN:-$(ask 'Python executable path' "${APP_DIR}/.venv/bin/python")}"
    [[ -n "${PYTHON_BIN}" ]] || fail "Python path must not be empty"

    local SKIP_ENV_CONFIG=false
    if [[ -f "${ENV_FILE}" ]]; then
        echo ""
        if confirm "Environment file ${ENV_FILE} already exists. Skip the detailed parameter configuration and keep the existing file?" "Y"; then
            SKIP_ENV_CONFIG=true
        fi
    fi

    if ! ${SKIP_ENV_CONFIG}; then
        echo ""
        echo -e "${CYAN}Runtime parameters${NC}"
        TICKERS="${TICKERS:-$(ask 'Default ticker pool (comma-separated)' 'AAPL,MSFT,GOOGL,AMZN,NVDA,META,TSLA,AMD,NFLX,AVGO,PLTR,COIN')}"
        FIN_DATA_SOURCE="${FIN_DATA_SOURCE:-$(ask 'Market data source (finnhub/yfinance/financial_datasets)' 'finnhub')}"
        MODEL_NAME="${MODEL_NAME:-$(ask 'Default model name' 'qwen3-max')}"
        MAX_COMM_CYCLES="${MAX_COMM_CYCLES:-$(ask_required 'Max discussion rounds' '2')}"
        validate_numeric "${MAX_COMM_CYCLES}" || fail "Max discussion rounds must be numeric: ${MAX_COMM_CYCLES}"
        MARGIN_REQUIREMENT="${MARGIN_REQUIREMENT:-$(ask_required 'Margin requirement' '0.5')}"
        validate_numeric "${MARGIN_REQUIREMENT}" || fail "Margin requirement must be numeric: ${MARGIN_REQUIREMENT}"

        echo ""
        echo -e "${CYAN}API key configuration${NC}"
        FINANCIAL_DATASETS_API_KEY="${FINANCIAL_DATASETS_API_KEY:-$(ask 'FINANCIAL_DATASETS_API_KEY (optional)' '')}"
        FINNHUB_API_KEY="${FINNHUB_API_KEY:-$(ask 'FINNHUB_API_KEY (recommended for live mode)' '')}"
        POLYGON_API_KEY="${POLYGON_API_KEY:-$(ask 'POLYGON_API_KEY (optional)' '')}"
        OPENAI_API_KEY="${OPENAI_API_KEY:-$(ask 'OPENAI_API_KEY (optional)' '')}"
        OPENAI_BASE_URL="${OPENAI_BASE_URL:-$(ask 'OPENAI_BASE_URL (optional)' '')}"
        DASHSCOPE_API_KEY="${DASHSCOPE_API_KEY:-$(ask 'DASHSCOPE_API_KEY (optional)' '')}"
        MEMORY_API_KEY="${MEMORY_API_KEY:-$(ask 'MEMORY_API_KEY (optional)' '')}"

        if [[ "${FIN_DATA_SOURCE}" == "finnhub" && -z "${FINNHUB_API_KEY}" ]]; then
            warn "You selected finnhub as the data source, but FINNHUB_API_KEY is empty. This usually fails in live mode."
        fi
        if [[ -z "${OPENAI_API_KEY}" && -z "${DASHSCOPE_API_KEY}" ]]; then
            warn "Both OPENAI_API_KEY and DASHSCOPE_API_KEY are empty; model calls may not work."
        fi

        if confirm "Execute skills in a Docker sandbox?" "N" "${AUTO_USE_DOCKER}"; then
            SKILL_SANDBOX_MODE="docker"
        else
            SKILL_SANDBOX_MODE="none"
        fi

        echo ""
        echo -e "${CYAN}Deployment summary${NC}"
        echo "  App directory: ${APP_DIR}"
        echo "  Service user:  ${SERVICE_USER}:${SERVICE_GROUP}"
        echo "  Domain:        ${DOMAIN}"
        echo "  Env file:      ${ENV_FILE}"
        echo "  Python:        ${PYTHON_BIN}"
        echo "  Data source:   ${FIN_DATA_SOURCE:-}"
        echo "  Model:         ${MODEL_NAME:-}"
        echo "  Sandbox mode:  ${SKILL_SANDBOX_MODE:-none}"
        echo ""

        if ! confirm "Confirm the configuration above and proceed to write system files?" "Y"; then
            fail "Deployment cancelled by user."
        fi
    else
        echo -e "${GREEN}Using the existing environment file; skipping the detailed parameter configuration.${NC}"
    fi

    if [[ ! -x "${PYTHON_BIN}" ]]; then
        warn "${PYTHON_BIN} not found; creating a virtual environment."
        python3 -m venv "${APP_DIR}/.venv"
        "${APP_DIR}/.venv/bin/python" -m pip install --upgrade pip
        PYTHON_BIN="${APP_DIR}/.venv/bin/python"
    fi

    log "Installing backend dependencies"
    "${PYTHON_BIN}" -m pip install -e "${APP_DIR}"

    log "Building frontend"
    (cd "${APP_DIR}/frontend" && npm install && npm run build)

    if ! ${SKIP_ENV_CONFIG}; then
        log "Writing environment file ${ENV_FILE}"
        write_env_file
    fi

    if confirm "Generate and install the systemd units?" "Y" "${AUTO_INSTALL_SYSTEMD}"; then
        render_systemd_unit "Agent Service" "backend.apps.agent_service:app" "8000" "1" "1024M" "/etc/systemd/system/bigtime-agent.service"
        render_systemd_unit "Trading Service" "backend.apps.trading_service:app" "8001" "1" "768M" "/etc/systemd/system/bigtime-trading.service"
        render_systemd_unit "News Service" "backend.apps.news_service:app" "8002" "1" "768M" "/etc/systemd/system/bigtime-news.service"
        render_systemd_unit "Runtime Service" "backend.apps.runtime_service:app" "8003" "1" "1536M" "/etc/systemd/system/bigtime-runtime.service"
        sudo systemctl daemon-reload
        if confirm "Enable and start the bigtime-* services now?" "Y" "${AUTO_START_SYSTEMD}"; then
            sudo systemctl enable --now bigtime-agent.service
            sudo systemctl enable --now bigtime-trading.service
            sudo systemctl enable --now bigtime-news.service
            sudo systemctl enable --now bigtime-runtime.service
        fi
    fi

    if confirm "Generate and install the nginx config?" "Y" "${AUTO_INSTALL_NGINX}"; then
        local use_tls="no"
        if confirm "Use HTTPS / Let's Encrypt certificate paths?" "N" "${AUTO_USE_TLS}"; then
            SSL_CERT_PATH="${SSL_CERT_PATH:-$(ask_required 'Path to the SSL certificate fullchain.pem' "/etc/letsencrypt/live/${DOMAIN}/fullchain.pem")}"
            SSL_KEY_PATH="${SSL_KEY_PATH:-$(ask_required 'Path to the SSL private key privkey.pem' "/etc/letsencrypt/live/${DOMAIN}/privkey.pem")}"

            local ssl_err=0
            [[ -f "${SSL_CERT_PATH}" ]] || { warn "SSL certificate file not found: ${SSL_CERT_PATH}"; ssl_err=1; }
            [[ -f "${SSL_KEY_PATH}" ]] || { warn "SSL private key file not found: ${SSL_KEY_PATH}"; ssl_err=1; }
            [[ -f "/etc/letsencrypt/options-ssl-nginx.conf" ]] || { warn "Missing /etc/letsencrypt/options-ssl-nginx.conf; check your certbot setup"; ssl_err=1; }
            [[ -f "/etc/letsencrypt/ssl-dhparams.pem" ]] || { warn "Missing /etc/letsencrypt/ssl-dhparams.pem; check your certbot setup"; ssl_err=1; }

            if [[ ${ssl_err} -eq 0 ]]; then
                use_tls="yes"
            else
                warn "Falling back to HTTP mode because required SSL files are missing, so that nginx can pass its config check."
                use_tls="no"
            fi
        else
            SSL_CERT_PATH=""
            SSL_KEY_PATH=""
        fi
        NGINX_TARGET="/etc/nginx/conf.d/bigtime.conf"
        render_nginx_conf "${NGINX_TARGET}" "${use_tls}"
        if confirm "Run nginx -t and apply the config now?" "Y" "${AUTO_RELOAD_NGINX}"; then
            log "Validating the nginx configuration..."
            if ! sudo nginx -t; then
                fail "nginx config check failed! Adjust based on the errors above. Common causes: ports 80/443 already in use, or a conflicting server_name."
            fi

            if systemctl is-active --quiet nginx; then
                log "nginx is running; reloading..."
                sudo systemctl reload nginx
            else
                log "nginx is not running; trying to start it..."
                sudo systemctl enable --now nginx
            fi

            # Critical fix: make sure the nginx user has execute permission on the /root path
            if [[ "${APP_DIR}" == /root/* ]]; then
                log "App is deployed under /root; fixing parent directory access permissions..."
                sudo chmod o+x /root 2>/dev/null || true
                sudo chmod o+x "$(dirname "${APP_DIR}")" 2>/dev/null || true
                sudo chmod -R o+rX "${APP_DIR}"
            fi

            log "nginx configuration applied."
        fi
    fi

    echo ""
    log "Deployment wizard finished"
    echo "App directory: ${APP_DIR}"
    echo "Env file:      ${ENV_FILE}"
    echo "Python:        ${PYTHON_BIN}"
    echo "Sandbox mode:  ${SKILL_SANDBOX_MODE}"
    echo ""
    echo "Suggested verification:"
    echo "  curl http://127.0.0.1:8003/health"
    echo "  curl http://127.0.0.1:8003/api/runtime/current"
    echo "  sudo systemctl status bigtime-runtime.service"
    echo "  tail -f ${APP_DIR}/runs/<run_id>/logs/gateway.log"
}

main "$@"
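
For reference, a fully non-interactive run of this installer might look like the following; every flag comes from `usage()` above, but the concrete values (paths, domain) are illustrative:

```bash
# Illustrative non-interactive install; adjust paths and domain for your host.
./deploy/install-production.sh \
    --non-interactive \
    --app-dir /opt/bigtime/app \
    --service-user bigtime --service-group bigtime \
    --domain bigtime.example.com \
    --env-file /etc/bigtime/bigtime.env \
    --fin-data-source finnhub \
    --use-docker-sandbox \
    --install-deps --install-systemd --start-systemd \
    --install-nginx --reload-nginx --without-tls
```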
@@ -2,8 +2,9 @@ server {
     listen 80;
     server_name bigtime.cillinn.com;
 
+    root /opt/bigtime/app/frontend/dist;
+
     location /.well-known/acme-challenge/ {
-        root /var/www/bigtime/current;
         allow all;
     }
 
@@ -16,7 +17,7 @@ server {
     listen 443 ssl http2;
     server_name bigtime.cillinn.com;
 
-    root /var/www/bigtime/current;
+    root /opt/bigtime/app/frontend/dist;
     index index.html;
 
     ssl_certificate /etc/letsencrypt/live/bigtime.cillinn.com/fullchain.pem;
@@ -36,6 +37,56 @@ server {
         proxy_read_timeout 300s;
     }
 
+    location /api/runtime/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/dynamic-team/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/trading/ {
+        proxy_pass http://127.0.0.1:8001;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/news/ {
+        proxy_pass http://127.0.0.1:8002;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/ {
+        proxy_pass http://127.0.0.1:8000;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
     location / {
         try_files $uri $uri/ /index.html;
     }
@@ -2,13 +2,75 @@ server {
     listen 80;
     server_name bigtime.cillinn.com;
 
-    root /var/www/bigtime/current;
+    root /opt/bigtime/app/frontend/dist;
     index index.html;
 
     location /.well-known/acme-challenge/ {
         allow all;
     }
 
+    location /ws {
+        proxy_pass http://127.0.0.1:8765;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection "upgrade";
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/runtime/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/dynamic-team/ {
+        proxy_pass http://127.0.0.1:8003;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/trading/ {
+        proxy_pass http://127.0.0.1:8001;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/news/ {
+        proxy_pass http://127.0.0.1:8002;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
+    location /api/ {
+        proxy_pass http://127.0.0.1:8000;
+        proxy_http_version 1.1;
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_read_timeout 300s;
+    }
+
     location / {
         try_files $uri $uri/ /index.html;
     }
134	deploy/production-deployment.md	Normal file
@@ -0,0 +1,134 @@
# Production Deployment

This is the recommended production deployment mode for the current repository.

## Recommendation

Use:

- split FastAPI services
- `systemd` as the process supervisor
- `nginx` as TLS terminator and reverse proxy
- static frontend build served by `nginx`
- Docker-based skill sandbox

This matches the current architecture better than a monolithic process and is
lower-risk than introducing Kubernetes at the current stage.

## Why This Mode Fits Best

1. The repository already uses a split-service runtime model.
2. `runtime_service` is the correct control-plane entrypoint for starting and
   stopping Gateway subprocesses.
3. The Gateway is run-scoped and ephemeral, which fits `systemd` + subprocess
   management better than forcing everything into a single service binary.
4. Skill execution has security requirements; Docker sandboxing is the practical
   production default.

## Service Layout

| Component | Bind |
|-----------|------|
| `agent_service` | `127.0.0.1:8000` |
| `trading_service` | `127.0.0.1:8001` |
| `news_service` | `127.0.0.1:8002` |
| `runtime_service` | `127.0.0.1:8003` |
| gateway websocket | spawned by `runtime_service` |
| `nginx` | `:80` / `:443` |
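
As a quick smoke test of this layout, each loopback service can be probed directly. `/health` on `runtime_service` (port 8003) appears in this repo's verification commands; the same path is assumed here for the other three services:

```bash
# Probe the loopback services; /health on ports other than 8003 is an assumption.
for port in 8000 8001 8002 8003; do
    curl -fsS "http://127.0.0.1:${port}/health" && echo " <- port ${port} OK"
done
```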

## Frontend

Recommended frontend mode:

```bash
cd frontend
npm install
npm run build
```

Then point `nginx` root at:

```text
/opt/bigtime/app/frontend/dist
```

This is preferred over running `backend.apps.frontend_service` in production,
because static serving via `nginx` is simpler and more reliable.

## Environment

Create a shared environment file, for example:

```bash
sudo mkdir -p /etc/bigtime
sudo cp .env /etc/bigtime/bigtime.env
```

Required production settings:

```bash
AGENT_SERVICE_URL=http://127.0.0.1:8000
TRADING_SERVICE_URL=http://127.0.0.1:8001
NEWS_SERVICE_URL=http://127.0.0.1:8002
RUNTIME_SERVICE_URL=http://127.0.0.1:8003

SKILL_SANDBOX_MODE=docker
SKILL_SANDBOX_MEMORY_LIMIT=512m
SKILL_SANDBOX_CPU_LIMIT=1.0
SKILL_SANDBOX_NETWORK=none
SKILL_SANDBOX_TIMEOUT=60
```
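
As a rough mental model of the sandbox limits above (not the executor's actual invocation; the image name and entrypoint here are hypothetical), they map onto Docker flags roughly like this:

```bash
# Hypothetical translation of the SKILL_SANDBOX_* settings into a docker run;
# "bigtime-skill-sandbox" and the skill entrypoint are illustrative names only.
timeout 60 docker run --rm \
    --memory 512m \
    --cpus 1.0 \
    --network none \
    bigtime-skill-sandbox python /skill/main.py
```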

Also supply the required market/model API keys in the same environment file or
through your secret-management system.

## Data Persistence

Persist these paths on durable storage:

- `runs/`
- `logs/` if you keep service logs on disk
- optional `.env`-backed secrets should not live inside the repo working tree

The key runtime source of truth is:

- `runs/<run_id>/state/runtime_state.json`
- `runs/<run_id>/state/server_state.json`
- `runs/<run_id>/logs/gateway.log`
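
Since the run directory is the source of truth, a periodic snapshot is cheap insurance; a minimal sketch, assuming the app lives at `/opt/bigtime/app` and `/var/backups/bigtime` already exists:

```bash
# Snapshot run-scoped state and logs; the destination path is an assumption.
tar -czf "/var/backups/bigtime/runs-$(date +%Y%m%d).tar.gz" \
    -C /opt/bigtime/app runs
```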

## nginx Pattern

Recommended routing:

- `/` -> static frontend
- `/api/runtime/*` -> `127.0.0.1:8003`
- `/api/dynamic-team/*` -> `127.0.0.1:8003`
- `/api/trading/*` -> `127.0.0.1:8001`
- `/api/news/*` -> `127.0.0.1:8002`
- `/api/*` -> `127.0.0.1:8000`
- `/ws` -> gateway websocket

The checked-in nginx config should be treated as a starting point, not a full
multi-service production config.

## Operational Notes

- Use `workers=1` for `runtime_service` unless you deliberately redesign the
  runtime manager around multi-process coordination.
- Keep the other API services stateless and scale them separately if needed.
- Monitor:
  - `runtime_service`
  - run-scoped `gateway.log`
  - Docker daemon health
- Rotate logs outside the app, e.g. with journald or logrotate.
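
In day-to-day operation these checks reduce to a handful of commands (unit names as installed by the deploy script; paths assume the `/opt/bigtime/app` layout):

```bash
# Control-plane, gateway, and Docker health at a glance.
sudo systemctl status bigtime-runtime.service --no-pager
sudo journalctl -u bigtime-runtime.service -n 100 --no-pager
tail -f /opt/bigtime/app/runs/<run_id>/logs/gateway.log   # run-scoped gateway log
docker info --format '{{.ServerVersion}}'                 # Docker daemon reachable?
```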

## Best Next Step

Deploy with:

- `systemd` units from [deploy/systemd](deploy/systemd)
- `nginx` in front
- one VM first

Only move to containers/orchestration after the runtime/gateway operational
behavior is stable in that simpler topology.
47	deploy/systemd/README.md	Normal file
@@ -0,0 +1,47 @@
# systemd Units

This directory contains recommended `systemd` unit templates for the current
split-service production topology.

## Recommended Topology

- `agent_service` on `127.0.0.1:8000`
- `trading_service` on `127.0.0.1:8001`
- `news_service` on `127.0.0.1:8002`
- `runtime_service` on `127.0.0.1:8003`
- `nginx` serves `frontend/dist` and proxies `/api/*` + `/ws`
- `runtime_service` spawns the run-scoped Gateway subprocess on demand
- skill execution runs with `SKILL_SANDBOX_MODE=docker`

## Install

Adjust these placeholders before installing:

- `/opt/bigtime/app` -> repository root on the server
- `/opt/bigtime/app/.venv/bin/python` -> Python interpreter
- `/etc/bigtime/bigtime.env` -> shared environment file
- `bigtime` -> service user

Then copy units:

```bash
sudo mkdir -p /etc/bigtime
sudo cp .env /etc/bigtime/bigtime.env

sudo cp deploy/systemd/bigtime-*.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable --now bigtime-agent.service
sudo systemctl enable --now bigtime-trading.service
sudo systemctl enable --now bigtime-news.service
sudo systemctl enable --now bigtime-runtime.service
```

## Frontend

Recommended production frontend mode:

- build with `cd frontend && npm install && npm run build`
- let `nginx` serve `frontend/dist` directly

The repository also contains `backend.apps.frontend_service`, but for
production the lower-complexity path is static hosting via `nginx`.
25	deploy/systemd/bigtime-agent.service	Normal file
@@ -0,0 +1,25 @@
[Unit]
Description=BigTime Agent Service
After=network.target

[Service]
Type=simple
User=bigtime
Group=bigtime
WorkingDirectory=/opt/bigtime/app
EnvironmentFile=/etc/bigtime/bigtime.env
ExecStart=/opt/bigtime/app/.venv/bin/python -m uvicorn backend.apps.agent_service:app --host 127.0.0.1 --port 8000 --workers 1 --log-level warning --no-access-log
Restart=always
RestartSec=3
TimeoutStopSec=20
KillMode=mixed
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=full
ProtectHome=true
LimitNOFILE=65535
TasksMax=4096
MemoryMax=1024M

[Install]
WantedBy=multi-user.target

25	deploy/systemd/bigtime-news.service	Normal file
@@ -0,0 +1,25 @@
[Unit]
Description=BigTime News Service
After=network.target

[Service]
Type=simple
User=bigtime
Group=bigtime
WorkingDirectory=/opt/bigtime/app
EnvironmentFile=/etc/bigtime/bigtime.env
ExecStart=/opt/bigtime/app/.venv/bin/python -m uvicorn backend.apps.news_service:app --host 127.0.0.1 --port 8002 --workers 1 --log-level warning --no-access-log
Restart=always
RestartSec=3
TimeoutStopSec=20
KillMode=mixed
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=full
ProtectHome=true
LimitNOFILE=65535
TasksMax=4096
MemoryMax=768M

[Install]
WantedBy=multi-user.target

25	deploy/systemd/bigtime-runtime.service	Normal file
@@ -0,0 +1,25 @@
[Unit]
Description=BigTime Runtime Service
After=network.target

[Service]
Type=simple
User=bigtime
Group=bigtime
WorkingDirectory=/opt/bigtime/app
EnvironmentFile=/etc/bigtime/bigtime.env
ExecStart=/opt/bigtime/app/.venv/bin/python -m uvicorn backend.apps.runtime_service:app --host 127.0.0.1 --port 8003 --workers 1 --log-level warning --no-access-log
Restart=always
RestartSec=3
TimeoutStopSec=30
KillMode=mixed
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=full
ProtectHome=true
LimitNOFILE=65535
TasksMax=4096
MemoryMax=1536M

[Install]
WantedBy=multi-user.target

25	deploy/systemd/bigtime-trading.service	Normal file
@@ -0,0 +1,25 @@
[Unit]
Description=BigTime Trading Service
After=network.target

[Service]
Type=simple
User=bigtime
Group=bigtime
WorkingDirectory=/opt/bigtime/app
EnvironmentFile=/etc/bigtime/bigtime.env
ExecStart=/opt/bigtime/app/.venv/bin/python -m uvicorn backend.apps.trading_service:app --host 127.0.0.1 --port 8001 --workers 1 --log-level warning --no-access-log
Restart=always
RestartSec=3
TimeoutStopSec=20
KillMode=mixed
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=full
ProtectHome=true
LimitNOFILE=65535
TasksMax=4096
MemoryMax=768M

[Install]
WantedBy=multi-user.target
47	deploy/uninstall-production.sh	Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env bash
set -euo pipefail

GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

log() {
    echo -e "${GREEN}[bigtime]${NC} $*"
}

warn() {
    echo -e "${YELLOW}[bigtime]${NC} $*"
}

SYSTEMD_UNITS=(
    bigtime-agent.service
    bigtime-trading.service
    bigtime-news.service
    bigtime-runtime.service
)

NGINX_CONF="/etc/nginx/conf.d/bigtime.conf"
ENV_FILE="/etc/bigtime/bigtime.env"

for unit in "${SYSTEMD_UNITS[@]}"; do
    if systemctl list-unit-files "${unit}" >/dev/null 2>&1; then
        warn "Stopping ${unit}"
        sudo systemctl disable --now "${unit}" || true
        sudo rm -f "/etc/systemd/system/${unit}"
    fi
done

sudo systemctl daemon-reload || true

if [[ -f "${NGINX_CONF}" ]]; then
    warn "Removing nginx config ${NGINX_CONF}"
    sudo rm -f "${NGINX_CONF}"
    sudo nginx -t && sudo systemctl reload nginx || true
fi

if [[ -f "${ENV_FILE}" ]]; then
    warn "Keeping env file ${ENV_FILE}"
    warn "Delete it manually if you want a full cleanup."
fi

log "BigTime production service uninstall finished."
BIN	frontend/public/media/0.png	Normal file	(Size: 107 KiB)
BIN	frontend/public/media/1.png	Normal file	(Size: 115 KiB)
BIN	frontend/public/media/10.png	Normal file	(Size: 141 KiB)
BIN	frontend/public/media/11.png	Normal file	(Size: 146 KiB)
BIN	frontend/public/media/2.png	Normal file	(Size: 117 KiB)
BIN	frontend/public/media/3.png	Normal file	(Size: 140 KiB)
BIN	frontend/public/media/4.png	Normal file	(Size: 147 KiB)
BIN	frontend/public/media/5.png	Normal file	(Size: 106 KiB)
BIN	frontend/public/media/6.png	Normal file	(Size: 124 KiB)
BIN	frontend/public/media/7.png	Normal file	(Size: 117 KiB)
BIN	frontend/public/media/8.png	Normal file	(Size: 151 KiB)
BIN	frontend/public/media/9.png	Normal file	(Size: 150 KiB)
@@ -8,12 +8,17 @@ import { useFeedProcessor } from './hooks/useFeedProcessor';
 import { useRuntimeControls } from './hooks/useRuntimeControls';
 import { useStockDataRequests } from './hooks/useStockDataRequests';
 import { useWebSocketConnection } from './hooks/useWebSocketConnection';
-import { fetchRuntimeLogs } from './services/runtimeApi';
+import { fetchRuntimeAgents, fetchRuntimeLogs } from './services/runtimeApi';
 import { useAgentRunFileState, useAgentStore } from './store/agentStore';
 import { useMarketStore } from './store/marketStore';
 import { usePortfolioStore } from './store/portfolioStore';
 import { useRuntimeStore } from './store/runtimeStore';
 import { useUIStore } from './store/uiStore';
+import {
+  buildRuntimeAgentMeta,
+  findAgentByIdOrName,
+  sortRuntimeAgents,
+} from './utils/agentDisplay';
 
 const EDITABLE_AGENT_WORKSPACE_FILES = [
   'SOUL.md',
@@ -174,21 +179,57 @@ export default function LiveTradingApp() {
   const [isRuntimeLogsLoading, setIsRuntimeLogsLoading] = useState(false);
   const [runtimeLogsPayload, setRuntimeLogsPayload] = useState(null);
   const [runtimeLogsError, setRuntimeLogsError] = useState(null);
+  const [runtimeAgents, setRuntimeAgents] = useState([]);
   const agentFeedRef = useRef(null);
   const isSocketReady = isConnected && connectionStatus === 'connected';
 
-  const selectedAgentId = selectedSkillAgentId || AGENTS[0]?.id || null;
+  const resolvedAgents = useMemo(() => {
+    if (!Array.isArray(runtimeAgents) || runtimeAgents.length === 0) {
+      return AGENTS;
+    }
+
+    return sortRuntimeAgents(runtimeAgents).map((agentState, index) => {
+      const agentId = String(agentState?.agent_id || agentState?.id || '').trim();
+      const base = buildRuntimeAgentMeta(agentId, index);
+      const displayName = typeof agentState?.display_name === 'string' ? agentState.display_name.trim() : '';
+      return {
+        ...base,
+        id: agentId,
+        name: displayName || base.name,
+        runtimeStatus: agentState?.status || null,
+        lastSession: agentState?.last_session || null,
+        lastUpdated: agentState?.last_updated || null,
+      };
+    }).filter((agent) => agent.id);
+  }, [runtimeAgents]);
+
+  const selectedAgentId = selectedSkillAgentId || resolvedAgents[0]?.id || null;
   const selectedAgentProfile = selectedAgentId ? (agentProfilesByAgent[selectedAgentId] || null) : null;
   const selectedAgentSkills = selectedAgentId ? (agentSkillsByAgent[selectedAgentId] || []) : [];
   const selectedRunFileContent = selectedAgentId && selectedRunFile
     ? (runFilesByAgent[selectedAgentId]?.[selectedRunFile] || '')
     : '';
 
-  useEffect(() => {
-    if (!selectedSkillAgentId && AGENTS.length > 0) {
-      setSelectedSkillAgentId(AGENTS[0].id);
+  const loadRuntimeAgentsList = useCallback(async () => {
+    try {
+      const payload = await fetchRuntimeAgents();
+      setRuntimeAgents(Array.isArray(payload?.agents) ? payload.agents : []);
+    } catch {
+      setRuntimeAgents([]);
     }
-  }, [selectedSkillAgentId, setSelectedSkillAgentId]);
+  }, []);
+
+  useEffect(() => {
+    if (!selectedSkillAgentId && resolvedAgents.length > 0) {
+      setSelectedSkillAgentId(resolvedAgents[0].id);
+    }
+  }, [resolvedAgents, selectedSkillAgentId, setSelectedSkillAgentId]);
+
+  useEffect(() => {
+    if (selectedSkillAgentId && !resolvedAgents.some((agent) => agent.id === selectedSkillAgentId)) {
+      setSelectedSkillAgentId(resolvedAgents[0]?.id || null);
+    }
+  }, [resolvedAgents, selectedSkillAgentId, setSelectedSkillAgentId]);
 
   useEffect(() => {
     if (!selectedRunFile) {
@@ -196,6 +237,37 @@ export default function LiveTradingApp() {
     }
   }, [selectedRunFile, setSelectedWorkspaceFile]);
 
+  useEffect(() => {
+    void loadRuntimeAgentsList();
+  }, [loadRuntimeAgentsList]);
+
+  useEffect(() => {
+    const handleRuntimeAgentsUpdated = () => {
+      void loadRuntimeAgentsList();
+    };
+    window.addEventListener('runtime-agents-updated', handleRuntimeAgentsUpdated);
+    return () => {
+      window.removeEventListener('runtime-agents-updated', handleRuntimeAgentsUpdated);
+    };
+  }, [loadRuntimeAgentsList]);
+
+  useEffect(() => {
+    if (!isSocketReady) {
+      return;
+    }
+    void loadRuntimeAgentsList();
+  }, [isSocketReady, loadRuntimeAgentsList]);
+
+  useEffect(() => {
+    if (!selectedAgentId || !selectedRunFile) {
+      setRunDraftContent('');
+      return;
+    }
+
+    const cachedContent = runFilesByAgent[selectedAgentId]?.[selectedRunFile];
+    setRunDraftContent(typeof cachedContent === 'string' ? cachedContent : '');
+  }, [runFilesByAgent, selectedAgentId, selectedRunFile, setRunDraftContent]);
+
   useEffect(() => {
     if (!isSocketReady || !selectedAgentId || !clientRef.current) {
       return;
@@ -233,7 +305,7 @@ export default function LiveTradingApp() {
       return;
     }
 
-    AGENTS.forEach((agent) => {
+    resolvedAgents.forEach((agent) => {
       if (!agent?.id) {
         return;
       }
@@ -246,6 +318,7 @@ export default function LiveTradingApp() {
     clientRef,
     isSocketReady,
     requestAgentProfile,
+    resolvedAgents,
   ]);
 
   useEffect(() => {
@@ -326,13 +399,13 @@ export default function LiveTradingApp() {
   const bubbleFor = useCallback((idOrName) => {
     let bubble = bubbles[idOrName];
     if (bubble) return bubble;
-    const agent = AGENTS.find((item) => item.name === idOrName || item.id === idOrName);
+    const agent = findAgentByIdOrName(resolvedAgents, idOrName);
     if (agent) {
      bubble = bubbles[agent.id];
      if (bubble) return bubble;
     }
     return null;
-  }, [bubbles]);
+  }, [bubbles, resolvedAgents]);
 
   const handleManualTrigger = useCallback(() => {
     if (!isSocketReady || !clientRef.current) {
@@ -361,7 +434,7 @@ export default function LiveTradingApp() {
   }, []);
 
   const agentRequests = {
-    agents: AGENTS,
+    agents: resolvedAgents,
     agentProfilesByAgent,
     agentSkillsByAgent,
     runFilesByAgent,
@@ -1,9 +1,10 @@
 import React, { useState, useRef, useImperativeHandle, forwardRef } from 'react';
 import { formatTime } from '../utils/formatters';
-import { MESSAGE_COLORS, getAgentColors, AGENTS, ASSETS } from '../config/constants';
+import { MESSAGE_COLORS, getAgentColors, ASSETS } from '../config/constants';
 import { getModelIcon } from '../utils/modelIcons';
 import MarkdownModal from './MarkdownModal';
 import LobeModelLogo from './LobeModelLogo.jsx';
+import { findAgentByIdOrName, humanizeAgentId } from '../utils/agentDisplay';
 
 const isAnalyst = (agentId, agentName) => {
   if (agentId && agentId.includes('analyst')) return true;
@@ -36,7 +37,7 @@ const stripMarkdown = (text) => {
     .replace(/^[-=]+$/gm, '');
 };
 
-const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref) => {
+const AgentFeed = forwardRef(({ agents = [], feed, leaderboard, agentProfilesByAgent }, ref) => {
   const feedContentRef = useRef(null);
   const [highlightedId, setHighlightedId] = useState(null);
   const [selectedAgent, setSelectedAgent] = useState('all');
@@ -62,7 +63,7 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
   // Get agent info by name
   const getAgentInfoByName = (agentName) => {
     if (!agentName) return null;
-    const agentConfig = AGENTS.find((agent) => agent.name === agentName);
+    const agentConfig = findAgentByIdOrName(agents, agentName);
     const profile = agentConfig ? agentProfilesByAgent?.[agentConfig.id] : null;
     if (agentConfig && profile?.model_name) {
       return {
@@ -81,7 +82,7 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
     };
   };
 
-  // Get unique agent names from feed (only registered agents in AGENTS)
+  // Get unique agent names from feed using the current runtime agent list.
   const getUniqueAgents = () => {
     const agentNamesInFeed = new Set();
 
@@ -98,9 +99,10 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
       }
     });
 
-    // Filter to only include registered agents and sort by AGENTS array order
-    const registeredAgentNames = AGENTS.map(a => a.name);
-    return registeredAgentNames.filter(name => agentNamesInFeed.has(name));
+    const orderedRuntimeNames = agents.map((agent) => agent.name);
+    const knownNames = orderedRuntimeNames.filter(name => agentNamesInFeed.has(name));
+    const extraNames = [...agentNamesInFeed].filter(name => !orderedRuntimeNames.includes(name));
+    return [...knownNames, ...extraNames];
   };
 
   // Filter feed based on selected agent
@@ -177,6 +179,12 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
 
   const currentSelection = getCurrentSelectionInfo();
 
+  const resolveAgentDisplayName = (name, agentId) => {
+    if (name) return name;
+    const agent = findAgentByIdOrName(agents, agentId);
+    return agent?.name || humanizeAgentId(agentId);
+  };
+
   return (
     <div className="agent-feed">
       <div className="agent-feed-header">
@@ -241,7 +249,7 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
                   type="color"
                 />
               )}
-              <span>{agent}</span>
+              <span>{resolveAgentDisplayName(agent, agentInfo?.agentId)}</span>
             </div>
           );
         })}
@@ -255,7 +263,7 @@ const AgentFeed = forwardRef(({ feed, leaderboard, agentProfilesByAgent }, ref)
           <div className="empty-state">
             {selectedAgent === 'all'
               ? 'Waiting for system updates...'
-              : `${selectedAgent} has no messages`}
+              : `${resolveAgentDisplayName(selectedAgent, currentSelection.agentInfo?.agentId)} has no messages`}
           </div>
         )}
 
@@ -3,7 +3,6 @@ import GlobalStyles from '../styles/GlobalStyles';
 import Header from './Header.jsx';
 import RuntimeSettingsPanel from './RuntimeSettingsPanel.jsx';
 import NetValueChart from './NetValueChart.jsx';
-import { AGENTS } from '../config/constants';
 import { useRuntimeStore } from '../store/runtimeStore';
 import { useUIStore } from '../store/uiStore';
 import { formatNumber, formatTickerPrice } from '../utils/formatters';
@@ -401,6 +400,7 @@ export default function AppShell({
           <div className="view-panel">
             <Suspense fallback={<ViewLoadingFallback label="Loading trading room..." />}>
               <RoomView
+                agents={agentRequests.agents}
                 bubbles={bubbles}
                 bubbleFor={bubbleFor}
                 leaderboard={leaderboard}
@@ -501,7 +501,7 @@ export default function AppShell({
         {/* Right Panel: Agent Feed */}
         <div className="right-panel" style={{ width: `${100 - leftWidth}%` }}>
           <Suspense fallback={<ViewLoadingFallback label="Loading message feed..." />}>
-            <AgentFeed ref={agentFeedRef} feed={feed} leaderboard={leaderboard} agentProfilesByAgent={agentProfilesByAgent} />
+            <AgentFeed ref={agentFeedRef} agents={agentRequests.agents} feed={feed} leaderboard={leaderboard} agentProfilesByAgent={agentProfilesByAgent} />
           </Suspense>
         </div>
       </div>
@@ -1,8 +1,9 @@
|
|||||||
import React, { useEffect, useMemo, useRef, useState, useCallback } from 'react';
|
import React, { useEffect, useMemo, useRef, useState, useCallback } from 'react';
|
||||||
import { ASSETS, SCENE_NATIVE, AGENT_SEATS, AGENTS } from '../config/constants';
|
import { ASSETS, SCENE_NATIVE, AGENT_SEATS } from '../config/constants';
|
||||||
import AgentCard from './AgentCard';
|
import AgentCard from './AgentCard';
|
||||||
import { getModelIcon } from '../utils/modelIcons';
|
import { getModelIcon } from '../utils/modelIcons';
|
||||||
import LobeModelLogo from './LobeModelLogo.jsx';
|
import LobeModelLogo from './LobeModelLogo.jsx';
|
||||||
|
import { findAgentByIdOrName } from '../utils/agentDisplay';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Custom hook to load an image
|
* Custom hook to load an image
|
||||||
@@ -48,7 +49,22 @@ function getRankMedal(rank) {
|
|||||||
* Supports click and hover (1.5s) to show agent performance cards
|
* Supports click and hover (1.5s) to show agent performance cards
|
||||||
* Supports replay mode - completely independent from live mode
|
* Supports replay mode - completely independent from live mode
|
||||||
*/
|
*/
|
||||||
export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfilesByAgent, feed, onJumpToMessage, onOpenLaunchConfig }) {
|
function getSeatPosition(index) {
|
||||||
|
if (AGENT_SEATS[index]) {
|
||||||
|
return AGENT_SEATS[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
const overflowIndex = index - AGENT_SEATS.length;
|
||||||
|
const columns = 3;
|
||||||
|
const row = Math.floor(overflowIndex / columns);
|
||||||
|
const column = overflowIndex % columns;
|
||||||
|
return {
|
||||||
|
x: 0.18 + (column * 0.18),
|
||||||
|
y: Math.max(0.14, 0.22 - (row * 0.1)),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function RoomView({ agents = [], bubbles, bubbleFor, leaderboard, agentProfilesByAgent, feed, onJumpToMessage, onOpenLaunchConfig }) {
|
||||||
const canvasRef = useRef(null);
|
const canvasRef = useRef(null);
|
||||||
const containerRef = useRef(null);
|
const containerRef = useRef(null);
|
||||||
|
|
||||||
@@ -152,16 +168,16 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
|
|||||||
// Determine which agents are speaking
|
// Determine which agents are speaking
|
||||||
const speakingAgents = useMemo(() => {
|
const speakingAgents = useMemo(() => {
|
||||||
const speaking = {};
|
const speaking = {};
|
||||||
AGENTS.forEach(agent => {
|
agents.forEach(agent => {
|
||||||
const bubble = bubbleFor(agent.name);
|
const bubble = bubbleFor(agent.name);
|
||||||
speaking[agent.id] = !!bubble;
|
speaking[agent.id] = !!bubble;
|
||||||
});
|
});
|
||||||
return speaking;
|
return speaking;
|
||||||
}, [bubbles, bubbleFor]);
|
}, [agents, bubbleFor, bubbles]);
|
||||||
|
|
||||||
// Find agent data from leaderboard
|
// Find agent data from leaderboard
|
||||||
const getAgentData = (agentId) => {
|
const getAgentData = (agentId) => {
|
||||||
const agent = AGENTS.find(a => a.id === agentId);
|
const agent = agents.find(a => a.id === agentId);
|
||||||
if (!agent) return null;
|
if (!agent) return null;
|
||||||
const profile = agentProfilesByAgent?.[agentId] || null;
|
const profile = agentProfilesByAgent?.[agentId] || null;
|
||||||
|
|
||||||
@@ -195,7 +211,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge data but preserve the correct avatar from AGENTS config
|
// Merge data but preserve the configured visual metadata from frontend.
|
||||||
return {
|
return {
|
||||||
...agent,
|
...agent,
|
||||||
...leaderboardData,
|
...leaderboardData,
|
||||||
@@ -317,10 +333,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
|
|||||||
// Skip system messages
|
// Skip system messages
|
||||||
if (msg.agent === 'System') return;
|
if (msg.agent === 'System') return;
|
||||||
// Find matching agent
|
// Find matching agent
|
||||||
const agent = AGENTS.find(a =>
|
const agent = findAgentByIdOrName(agents, msg.agentId || msg.agent);
|
||||||
a.id === msg.agentId ||
|
|
||||||
a.name === msg.agent
|
|
||||||
);
|
|
||||||
if (agent) {
|
if (agent) {
|
||||||
messages.push({
|
messages.push({
|
||||||
feedItemId: item.id,
|
feedItemId: item.id,
|
||||||
@@ -333,10 +346,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
|
|||||||
} else if (item.type === 'conference' && item.data?.messages) {
|
} else if (item.type === 'conference' && item.data?.messages) {
|
||||||
item.data.messages.forEach((msg, msgIndex) => {
|
item.data.messages.forEach((msg, msgIndex) => {
|
||||||
if (msg.agent === 'System') return;
|
if (msg.agent === 'System') return;
|
||||||
const agent = AGENTS.find(a =>
|
const agent = findAgentByIdOrName(agents, msg.agentId || msg.agent);
|
||||||
a.id === msg.agentId ||
|
|
||||||
a.name === msg.agent
|
|
||||||
);
|
|
||||||
if (agent) {
|
if (agent) {
|
||||||
messages.push({
|
messages.push({
|
||||||
feedItemId: item.id,
|
feedItemId: item.id,
|
||||||
@@ -479,7 +489,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
     if (isReplaying) {
       // Find replay bubble for this agent
       const bubble = Object.values(replayBubbles).find(b => {
-        const agent = AGENTS.find(a => a.id === b.agentId);
+        const agent = agents.find(a => a.id === b.agentId);
         return agent && agent.name === agentName;
       });
       return bubble || null;
@@ -487,13 +497,13 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
       // Use normal bubbleFor function
       return bubbleFor(agentName);
     }
-  }, [isReplaying, replayBubbles, bubbleFor]);
+  }, [agents, isReplaying, replayBubbles, bubbleFor]);
 
   return (
     <div className="room-view">
       {/* Agents Indicator Bar */}
       <div className="room-agents-indicator">
-        {AGENTS.map((agent, index) => {
+        {agents.map((agent, index) => {
           const rank = getAgentRank(agent.id);
           const medal = rank ? getRankMedal(rank) : null;
           const agentData = getAgentData(agent.id);
@@ -572,7 +582,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
       <canvas ref={canvasRef} className="room-canvas" />
 
       {/* Speech Bubbles */}
-      {AGENTS.map((agent, idx) => {
+      {agents.map((agent, idx) => {
         const bubble = getBubbleForAgent(agent.name);
         if (!bubble) return null;
 
@@ -581,7 +591,7 @@ export default function RoomView({ bubbles, bubbleFor, leaderboard, agentProfile
         // Check if bubble is hidden
         if (hiddenBubbles[bubbleKey]) return null;
 
-        const pos = AGENT_SEATS[idx];
+        const pos = getSeatPosition(idx);
         const scaledWidth = SCENE_NATIVE.width * scale;
         const scaledHeight = SCENE_NATIVE.height * scale;
 
@@ -3,6 +3,7 @@
  */
 
 const trimTrailingSlash = (value) => value.replace(/\/+$/, "");
+const mediaAsset = (filename) => `/media/${filename}`;
 const isLocalDevHost = () => {
   if (typeof window === "undefined") {
     return false;
@@ -14,12 +15,12 @@ const isLocalDevHost = () => {
 // Centralized CDN asset URLs
 export const CDN_ASSETS = {
   companyRoom: {
-    agent_1: "https://img.alicdn.com/imgextra/i4/O1CN01Lr7SOl1lSExV0tOwv_!!6000000004817-2-tps-370-320.png",
-    agent_2: "https://img.alicdn.com/imgextra/i3/O1CN017Kb8cY1VQNUmuK47o_!!6000000002647-2-tps-368-312.png",
-    agent_3: "https://img.alicdn.com/imgextra/i3/O1CN010Fp55w1YqtGpVjgsS_!!6000000003111-2-tps-370-320.png",
-    agent_4: "https://img.alicdn.com/imgextra/i3/O1CN01VnUsML1Dkq6fHw3ks_!!6000000000255-2-tps-366-316.png",
-    agent_5: "https://img.alicdn.com/imgextra/i4/O1CN01o0kCQw1kyvbulBSl7_!!6000000004753-2-tps-370-314.png",
-    agent_6: "https://img.alicdn.com/imgextra/i2/O1CN01cLV0zl1FI6ULAunTp_!!6000000000463-2-tps-368-320.png",
+    agent_1: mediaAsset("0.png"),
+    agent_2: mediaAsset("1.png"),
+    agent_3: mediaAsset("2.png"),
+    agent_4: mediaAsset("3.png"),
+    agent_5: mediaAsset("4.png"),
+    agent_6: mediaAsset("5.png"),
     team_logo: "https://img.alicdn.com/imgextra/i2/O1CN01n2S8aV25hcZhhNH95_!!6000000007558-2-tps-616-700.png",
     reme_logo: "https://img.alicdn.com/imgextra/i2/O1CN01FhncuT1Tqp8LfCaft_!!6000000002434-2-tps-915-250.png",
     full_room_dark: "https://img.alicdn.com/imgextra/i2/O1CN014sOgzK28re5haGC3X_!!6000000007986-2-tps-1248-832.png",
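Note: the six per-agent avatars stop hot-linking `img.alicdn.com` and resolve through the new `mediaAsset` helper instead, so `agent_1` now points at `/media/0.png` served by the app itself; only the logos and the room background stay on the CDN.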
@@ -45,6 +46,14 @@ export const ASSETS = {
   remeLogo: CDN_ASSETS.companyRoom.reme_logo,
 };
 
+export const NON_MANAGER_AVATAR_POOL = Array.from({ length: 10 }, (_, index) => (
+  mediaAsset(`${index + 2}.png`)
+));
+
+export const DYNAMIC_ANALYST_AVATAR_POOL = Array.from({ length: 6 }, (_, index) => (
+  mediaAsset(`${index + 6}.png`)
+));
+
 // Scene dimensions (actual image size)
 export const SCENE_NATIVE = { width: 1248, height: 832 };
 
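Note: worked out from the `Array.from` index math, `NON_MANAGER_AVATAR_POOL` expands to `/media/2.png` through `/media/11.png` (ten entries, offset +2) and `DYNAMIC_ANALYST_AVATAR_POOL` to `/media/6.png` through `/media/11.png` (six entries, offset +6), so dynamically created analysts draw from the upper slice of the same self-hosted set.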
@@ -383,4 +392,3 @@ export const suggestAgentId = (name, baseType) => {
   // Must end with '_analyst' to get analysis tools registered
   return `${normalized || baseType}_${timestamp}_analyst`;
 };
-

@@ -1,5 +1,6 @@
 import { useState, useCallback, useRef } from "react";
 import { AGENTS } from "../config/constants";
+import { humanizeAgentId } from "../utils/agentDisplay";
 
 const MAX_FEED_ITEMS = 200;
 
@@ -108,7 +109,7 @@ const eventToMessage = (evt) => {
     id: generateId("msg"),
     timestamp,
     agentId: evt.agentId,
-    agent: normalizeAgentLabel(agent?.name || evt.agentName || evt.agentId || "Agent", evt.agentId),
+    agent: normalizeAgentLabel(agent?.name || evt.agentName || humanizeAgentId(evt.agentId) || "Agent", evt.agentId),
     role: agent?.role || evt.role || "Agent",
     content: evt.content
   };
@@ -118,7 +119,7 @@ const eventToMessage = (evt) => {
     id: generateId("memory"),
     timestamp,
     agentId: evt.agentId,
-    agent: agent?.name || evt.agentId || "Memory",
+    agent: agent?.name || humanizeAgentId(evt.agentId) || "Memory",
     role: "Memory",
     content: evt.content || evt.text || ""
   };
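Note: both feed labels now fall back to `humanizeAgentId` (added in `frontend/src/utils/agentDisplay.js` below) rather than showing a raw id. Judging from that implementation, the fallback behaves like this for an id that is not in the static `AGENTS` list (the example id is hypothetical):

```js
import { humanizeAgentId } from "../utils/agentDisplay";

humanizeAgentId("crypto_analyst"); // => "Crypto Analyst" (underscores to spaces, title-cased)
humanizeAgentId("");               // => "未知 Agent" ("Unknown Agent") for empty ids
```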

@@ -1,5 +1,6 @@
 import { useEffect, useRef, useCallback } from 'react';
 import { AGENTS } from '../config/constants';
+import { fetchRuntimeAgents } from '../services/runtimeApi';
 import { ReadOnlyClient } from '../services/websocket';
 import { useRuntimeStore } from '../store/runtimeStore';
 import { useOpenClawStore } from '../store/openclawStore';
@@ -8,6 +9,7 @@ import { usePortfolioStore } from '../store/portfolioStore';
 import { useAgentStore } from '../store/agentStore';
 import { useUIStore } from '../store/uiStore';
 import { normalizeTickerSymbols } from '../services/runtimeControls';
+import { humanizeAgentId } from '../utils/agentDisplay';
 
 /**
  * Normalize price history from server format
@@ -401,7 +403,7 @@ export function useWebSocketConnection({
           setLocalSkillDraftsByKey, setIsAgentSkillsLoading, setSkillDetailLoadingKey,
           setAgentSkillsSavingKey, setAgentSkillsFeedback, setIsWorkspaceFileLoading,
           setWorkspaceFileSavingKey, setWorkspaceFilesByAgent, setWorkspaceFileFeedback,
-          selectedSkillAgentId } = useAgentStore();
+          setWorkspaceDraftContent, selectedSkillAgentId } = useAgentStore();
 
   const { setBubbles } = useUIStore();
 
@@ -705,14 +707,19 @@
     agent_workspace_file_loaded: (e) => {
       const agentId = typeof e.agent_id === 'string' ? e.agent_id.trim() : '';
       const filename = typeof e.filename === 'string' ? e.filename.trim() : '';
+      const content = typeof e.content === 'string' ? e.content : '';
       if (!agentId || !filename) {
         setIsWorkspaceFileLoading(false);
         return;
       }
       setWorkspaceFilesByAgent((prev) => ({
         ...prev,
-        [agentId]: { ...(prev[agentId] || {}), [filename]: typeof e.content === 'string' ? e.content : '' }
+        [agentId]: { ...(prev[agentId] || {}), [filename]: content }
       }));
+      const { selectedSkillAgentId: currentAgentId, selectedWorkspaceFile: currentFilename } = useAgentStore.getState();
+      if (currentAgentId === agentId && currentFilename === filename) {
+        setWorkspaceDraftContent(content);
+      }
       setIsWorkspaceFileLoading(false);
       setWorkspaceFileSavingKey(null);
     },
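Note: because `agent_workspace_file_loaded` fires inside a WebSocket callback rather than during a render, the new lines read the live selection through zustand's `useAgentStore.getState()` escape hatch and only overwrite the editor draft when the loaded file is the one currently open. The pattern in isolation (`agentId`, `filename`, and `content` come from the event, as in the hunk):

```js
// Inside a socket callback (not a render): read current store state once,
// without subscribing, via zustand's getState().
const { selectedSkillAgentId, selectedWorkspaceFile, setWorkspaceDraftContent } =
  useAgentStore.getState();
if (selectedSkillAgentId === agentId && selectedWorkspaceFile === filename) {
  setWorkspaceDraftContent(content); // only clobber the draft for the open file
}
```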
@@ -1018,16 +1025,25 @@
 
     agent_message: (e) => {
       const agent = AGENTS.find((item) => item.id === e.agentId);
-      setBubbles({ [e.agentId]: { text: e.content, ts: Date.now(), agentName: agent?.name || e.agentName || e.agentId } });
+      setBubbles({ [e.agentId]: { text: e.content, ts: Date.now(), agentName: agent?.name || e.agentName || humanizeAgentId(e.agentId) } });
       processFeedEvent(e);
     },
 
     conference_message: (e) => {
       const agent = AGENTS.find((item) => item.id === e.agentId);
-      setBubbles({ [e.agentId]: { text: e.content, ts: Date.now(), agentName: agent?.name || e.agentName || e.agentId } });
+      setBubbles({ [e.agentId]: { text: e.content, ts: Date.now(), agentName: agent?.name || e.agentName || humanizeAgentId(e.agentId) } });
       processFeedEvent(e);
     },
 
+    runtime_agents_updated: async () => {
+      try {
+        await fetchRuntimeAgents();
+        window.dispatchEvent(new CustomEvent('runtime-agents-updated'));
+      } catch {
+        // Ignore refresh failures; next manual/runtime refresh will recover.
+      }
+    },
+
     memory: (e) => processFeedEvent(e),
 
     team_summary: (e) => {
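Note: `runtime_agents_updated` refreshes the roster and then broadcasts a DOM `CustomEvent`, so UI that is not wired to the stores can still react. A sketch of a consumer, assuming a caller-supplied async `loadAgents` fetch (the hook itself is hypothetical, not part of this diff):

```js
import { useEffect, useState } from 'react';

// Hypothetical consumer hook: `loadAgents` is whatever async roster fetch
// the caller provides (e.g. a thin wrapper over fetchRuntimeAgents).
function useRuntimeRoster(loadAgents) {
  const [agents, setAgents] = useState([]);
  useEffect(() => {
    let cancelled = false;
    const refresh = () => {
      loadAgents().then((list) => { if (!cancelled) setAgents(list ?? []); }).catch(() => {});
    };
    refresh(); // initial load
    window.addEventListener('runtime-agents-updated', refresh);
    return () => {
      cancelled = true;
      window.removeEventListener('runtime-agents-updated', refresh);
    };
  }, [loadAgents]);
  return agents;
}
```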

@@ -1,4 +1,5 @@
 import { create } from 'zustand';
+import { useShallow } from 'zustand/react/shallow';
 
 const resolveValue = (updater, currentValue) => (
   typeof updater === 'function' ? updater(currentValue) : updater
@@ -66,13 +67,15 @@ export const useAgentStore = create((set) => ({
  * Run-scoped file editing state currently reuses legacy `workspace*` field
  * names inside the store. Prefer this selector for new runtime UI code.
  */
-export const useAgentRunFileState = () => useAgentStore((state) => ({
-  selectedRunFile: state.selectedWorkspaceFile,
-  runFilesByAgent: state.workspaceFilesByAgent,
-  runDraftContent: state.workspaceDraftContent,
-  isRunFileLoading: state.isWorkspaceFileLoading,
-  runFileSavingKey: state.workspaceFileSavingKey,
-  runFileFeedback: state.workspaceFileFeedback,
-  setSelectedRunFile: state.setSelectedWorkspaceFile,
-  setRunDraftContent: state.setWorkspaceDraftContent,
-}));
+export const useAgentRunFileState = () => useAgentStore(
+  useShallow((state) => ({
+    selectedRunFile: state.selectedWorkspaceFile,
+    runFilesByAgent: state.workspaceFilesByAgent,
+    runDraftContent: state.workspaceDraftContent,
+    isRunFileLoading: state.isWorkspaceFileLoading,
+    runFileSavingKey: state.workspaceFileSavingKey,
+    runFileFeedback: state.workspaceFileFeedback,
+    setSelectedRunFile: state.setSelectedWorkspaceFile,
+    setRunDraftContent: state.setWorkspaceDraftContent,
+  }))
+);
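Note: wrapping the object-returning selector in `useShallow` is the standard zustand (v4.4+) fix for this pattern: a selector that builds a fresh object on each call never passes the default `Object.is` equality check, so every store update would re-render every `useAgentRunFileState` subscriber. A minimal illustration:

```js
import { create } from 'zustand';
import { useShallow } from 'zustand/react/shallow';

const useCounter = create((set) => ({
  count: 0,
  inc: () => set((s) => ({ count: s.count + 1 })),
}));

// Without useShallow: the selector returns a new object on every store
// update, Object.is never matches, and every subscriber re-renders each time.
const useBadState = () => useCounter((s) => ({ count: s.count }));

// With useShallow: results are compared field-by-field, so re-renders happen
// only when a selected field actually changes.
const useGoodState = () => useCounter(useShallow((s) => ({ count: s.count })));
```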

frontend/src/utils/agentDisplay.js (new file, 77 lines)
@@ -0,0 +1,77 @@
+import { AGENTS, DYNAMIC_ANALYST_AVATAR_POOL } from '../config/constants';
+
+export const STATIC_AGENT_INDEX = new Map(AGENTS.map((agent, index) => [agent.id, { agent, index }]));
+const ANALYST_COLOR_PALETTE = AGENTS.filter((agent) => agent.id.endsWith('_analyst')).map((agent) => agent.colors);
+
+const FALLBACK_AGENT_VISUALS = DYNAMIC_ANALYST_AVATAR_POOL.map((avatar, index) => {
+  return {
+    avatar,
+    colors: ANALYST_COLOR_PALETTE[index % Math.max(ANALYST_COLOR_PALETTE.length, 1)] || AGENTS[0].colors,
+  };
+});
+
+export function humanizeAgentId(agentId) {
+  if (!agentId) return '未知 Agent';
+  const normalized = String(agentId).trim();
+  const staticAgent = STATIC_AGENT_INDEX.get(normalized)?.agent;
+  if (staticAgent?.name) {
+    return staticAgent.name;
+  }
+  const label = normalized
+    .replace(/_/g, ' ')
+    .replace(/\b\w/g, (char) => char.toUpperCase());
+  return label || normalized;
+}
+
+export function inferAgentRole(agentId) {
+  const normalized = String(agentId || '').trim();
+  const staticAgent = STATIC_AGENT_INDEX.get(normalized)?.agent;
+  if (staticAgent?.role) {
+    return staticAgent.role;
+  }
+  if (normalized === 'portfolio_manager') return '投资经理';
+  if (normalized === 'risk_manager') return '风控经理';
+  if (normalized.endsWith('_analyst')) return '分析师';
+  return 'Agent';
+}
+
+export function buildRuntimeAgentMeta(agentId, runtimeIndex = 0) {
+  const normalized = String(agentId || '').trim();
+  const staticAgent = STATIC_AGENT_INDEX.get(normalized)?.agent;
+  if (staticAgent) {
+    return staticAgent;
+  }
+
+  const fallback = FALLBACK_AGENT_VISUALS[runtimeIndex % Math.max(FALLBACK_AGENT_VISUALS.length, 1)] || {
+    avatar: AGENTS[0].avatar,
+    colors: AGENTS[0].colors,
+  };
+  return {
+    id: normalized,
+    name: humanizeAgentId(normalized),
+    role: inferAgentRole(normalized),
+    avatar: fallback.avatar,
+    colors: fallback.colors,
+  };
+}
+
+export function sortRuntimeAgents(agents) {
+  const staticOrder = new Map(AGENTS.map((agent, index) => [agent.id, index]));
+  return [...agents].sort((left, right) => {
+    const leftId = String(left?.agent_id || left?.id || '').trim();
+    const rightId = String(right?.agent_id || right?.id || '').trim();
+    const leftStatic = staticOrder.has(leftId) ? staticOrder.get(leftId) : Number.MAX_SAFE_INTEGER;
+    const rightStatic = staticOrder.has(rightId) ? staticOrder.get(rightId) : Number.MAX_SAFE_INTEGER;
+    if (leftStatic !== rightStatic) {
+      return leftStatic - rightStatic;
+    }
+    return leftId.localeCompare(rightId);
+  });
+}
+
+export function findAgentByIdOrName(agents, idOrName) {
+  if (!Array.isArray(agents) || !idOrName) {
+    return null;
+  }
+  return agents.find((agent) => agent.id === idOrName || agent.name === idOrName) || null;
+}
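Note: together these helpers let the UI render agents registered at runtime that never appear in the static `AGENTS` config. A sketch of the intended flow with a hypothetical server roster (assuming `portfolio_manager` is one of the configured `AGENTS` while `crypto_analyst` is not):

```js
import { buildRuntimeAgentMeta, sortRuntimeAgents } from './agentDisplay';

const raw = [{ agent_id: 'crypto_analyst' }, { agent_id: 'portfolio_manager' }];

// Static agents keep their configured order; unknown ids sort after them alphabetically.
const ordered = sortRuntimeAgents(raw);

// Unknown ids get a generated name, an inferred role, and pooled avatar/colours.
const meta = ordered.map((entry, i) => buildRuntimeAgentMeta(entry.agent_id, i));
// for 'crypto_analyst': { id: 'crypto_analyst', name: 'Crypto Analyst', role: '分析师' ('Analyst'), ... }
```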
@@ -17,7 +17,11 @@ class NewsServiceClient:
         self._client: httpx.AsyncClient | None = None
 
     async def __aenter__(self) -> "NewsServiceClient":
-        self._client = httpx.AsyncClient(base_url=self.base_url, timeout=30.0)
+        self._client = httpx.AsyncClient(
+            base_url=self.base_url,
+            timeout=90.0,
+            limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)
+        )
         return self
 
     async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
@@ -21,7 +21,11 @@ class TradingServiceClient:
         self._client: httpx.AsyncClient | None = None
 
     async def __aenter__(self) -> "TradingServiceClient":
-        self._client = httpx.AsyncClient(base_url=self.base_url, timeout=30.0)
+        self._client = httpx.AsyncClient(
+            base_url=self.base_url,
+            timeout=60.0,
+            limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)
+        )
         return self
 
     async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
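Note: both async clients now configure the connection pool explicitly. `limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)` matches httpx's own default pool sizing, so the practical change is the timeout: the news client goes from 30 s to 90 s and the trading client to 60 s, giving the slower news upstream more headroom before `httpx.ReadTimeout` fires.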

start.sh (13 lines changed)
@@ -1,9 +1,9 @@
 #!/usr/bin/env bash
 # ============================================================
-# 大时代 生产环境启动脚本
+# 大时代 单机启动脚本
 #
 # 用法:
-# ./start.sh             # 构建前端 + 后台启动全部服务 (默认)
+# ./start.sh             # 构建前端 + 后台启动全部服务 (单机模式)
 # ./start.sh --no-build  # 跳过前端构建
 # ./start.sh --no-daemon # 前台运行 (不使用 nohup)
 # ./start.sh stop        # 停止所有后台服务
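(Translation of the renamed header: the script is re-labelled from 生产环境启动脚本, "production startup script", to 单机启动脚本, "single-host startup script", and the default-mode comment now reads 单机模式, "single-host mode".)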
@@ -13,7 +13,7 @@
 # WORKERS=2              # uvicorn worker 数 (默认: 2)
 # GATEWAY_HOST=0.0.0.0   # Gateway 绑定地址
 # GATEWAY_PORT=8765      # Gateway 端口
-# FRONTEND_PORT=80       # 前端服务端口 (默认: 80)
+# FRONTEND_PORT=8080     # 前端服务端口 (默认: 8080)
 # ============================================================
 set -euo pipefail
 
@@ -29,7 +29,7 @@ cd "${SCRIPT_DIR}"
 WORKERS="${WORKERS:-2}"
 GATEWAY_HOST="${GATEWAY_HOST:-0.0.0.0}"
 GATEWAY_PORT="${GATEWAY_PORT:-8765}"
-FRONTEND_PORT="${FRONTEND_PORT:-80}"
+FRONTEND_PORT="${FRONTEND_PORT:-8080}"
 PID_DIR="${SCRIPT_DIR}/.pids"
 LOG_DIR="${SCRIPT_DIR}/logs"
 FRONTEND_DIST="${SCRIPT_DIR}/frontend/dist"
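Note: defaulting FRONTEND_PORT to 8080 also sidesteps the privilege requirement that binding port 80 carries on Linux (ports below 1024 are privileged); running `FRONTEND_PORT=80 ./start.sh` restores the old behaviour where that is acceptable.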
@@ -294,9 +294,12 @@ do_start() {
 
   echo ""
   echo -e "${CYAN}══════════════════════════════════════════${NC}"
-  echo -e "${CYAN}  大时代 · 生产环境启动${NC}"
+  echo -e "${CYAN}  大时代 · 单机启动${NC}"
   echo -e "${CYAN}══════════════════════════════════════════${NC}"
   echo ""
+  echo -e "${YELLOW}说明:${NC} 当前脚本适合单机运行或演示环境。"
+  echo -e "${YELLOW}正式生产部署请优先使用 deploy/systemd + nginx 静态前端方案。${NC}"
+  echo ""
 
   if ${DAEMON}; then
     start_daemon
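(Translation of the new banner and notices: 大时代 · 单机启动 reads "大时代 · single-host startup"; the two added lines say "Note: this script is intended for single-host or demo runs" and "For formal production deployment, prefer the deploy/systemd + nginx static-frontend setup".)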