Add configurable data providers and localize frontend UI

This commit is contained in:
2026-03-15 00:55:12 +08:00
parent 12de93aa30
commit d233a3f55d
38 changed files with 1936 additions and 1038 deletions

View File

@@ -3,9 +3,9 @@
AgentScope Native Model Factory
Uses native AgentScope model classes for LLM calls
"""
import os
from enum import Enum
from typing import Optional, Tuple
import os
from agentscope.formatter import (
AnthropicChatFormatter,
DashScopeChatFormatter,
@@ -20,6 +20,11 @@ from agentscope.model import (
OllamaChatModel,
OpenAIChatModel,
)
from backend.config.env_config import (
canonicalize_model_provider,
get_agent_model_config,
get_env_str,
)
class ModelProvider(Enum):
@@ -108,7 +113,7 @@ def create_model(
Returns:
AgentScope model instance
"""
provider = provider.upper()
provider = canonicalize_model_provider(provider)
model_class = PROVIDER_MODEL_MAP.get(provider)
if model_class is None:
@@ -138,19 +143,21 @@ def create_model(
# Handle custom OpenAI base URL
if provider == "OPENAI":
base_url = os.getenv("OPENAI_BASE_URL") or os.getenv("OPENAI_API_BASE")
base_url = get_env_str("OPENAI_BASE_URL") or get_env_str(
"OPENAI_API_BASE",
)
if base_url:
model_kwargs["client_args"] = {"base_url": base_url}
# Handle DashScope base URL (uses different parameter)
if provider in ("DASHSCOPE", "ALIBABA"):
base_url = os.getenv("DASHSCOPE_BASE_URL")
base_url = get_env_str("DASHSCOPE_BASE_URL")
if base_url:
model_kwargs["base_http_api_url"] = base_url
# Handle Ollama host
if provider == "OLLAMA":
host = os.getenv("OLLAMA_HOST")
host = get_env_str("OLLAMA_HOST")
if host:
model_kwargs["host"] = host
@@ -174,23 +181,11 @@ def get_agent_model(agent_id: str, stream: bool = False):
Returns:
AgentScope model instance
"""
# Normalize agent_id to uppercase for env var lookup
agent_key = agent_id.upper().replace("-", "_")
# Try agent-specific config first
model_name = os.getenv(f"AGENT_{agent_key}_MODEL_NAME")
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
print(f"Using specific model {model_name} for agent {agent_key}")
# Fall back to global config
if not model_name:
model_name = os.getenv("MODEL_NAME", "gpt-4o")
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
resolved = get_agent_model_config(agent_id)
return create_model(
model_name=model_name,
provider=provider,
model_name=resolved.model_name,
provider=resolved.provider,
stream=stream,
)
@@ -205,17 +200,7 @@ def get_agent_formatter(agent_id: str):
Returns:
AgentScope formatter instance
"""
# Normalize agent_id to uppercase for env var lookup
agent_key = agent_id.upper().replace("-", "_")
# Try agent-specific config first
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
# Fall back to global config
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
provider = provider.upper()
provider = get_agent_model_config(agent_id).provider
formatter_class = PROVIDER_FORMATTER_MAP.get(provider, OpenAIChatFormatter)
return formatter_class()
@@ -230,14 +215,5 @@ def get_agent_model_info(agent_id: str) -> Tuple[str, str]:
Returns:
Tuple of (model_name, provider_name)
"""
agent_key = agent_id.upper().replace("-", "_")
model_name = os.getenv(f"AGENT_{agent_key}_MODEL_NAME")
provider = os.getenv(f"AGENT_{agent_key}_MODEL_PROVIDER")
if not model_name:
model_name = os.getenv("MODEL_NAME", "gpt-4o")
if not provider:
provider = os.getenv("MODEL_PROVIDER", "OPENAI")
return model_name, provider.upper()
resolved = get_agent_model_config(agent_id)
return resolved.model_name, resolved.provider