Refactor assistant configuration management and update documentation

- Removed legacy agent profile settings from the .env.example and README, streamlining the configuration process.
- Introduced a new local YAML configuration adapter for assistant settings, allowing for easier management of assistant profiles.
- Updated backend integration documentation to clarify the behavior of assistant config sourcing based on backend URL settings.
- Updated the TTS, ASR, LLM, and realtime service implementations to take API keys directly from the new configuration structure, removing their environment-variable fallbacks (e.g. DASHSCOPE_API_KEY, LLM_API_KEY/OPENAI_API_KEY, ASR_API_KEY, TTS_API_KEY).
- Enhanced test coverage for the new local YAML adapter and its integration with backend services.
This commit is contained in:
Xin Wang
2026-03-05 21:24:15 +08:00
parent d0a6419990
commit 935f2fbd1f
17 changed files with 585 additions and 739 deletions

View File

@@ -89,7 +89,7 @@ class DashScopeTTSService(BaseTTSService):
speed: float = 1.0,
):
super().__init__(voice=voice, sample_rate=sample_rate, speed=speed)
self.api_key = api_key or os.getenv("DASHSCOPE_API_KEY") or os.getenv("TTS_API_KEY")
self.api_key = api_key
self.api_url = (
api_url
or os.getenv("DASHSCOPE_TTS_API_URL")

View File

@@ -44,13 +44,13 @@ class OpenAILLMService(BaseLLMService):
Args:
model: Model name (e.g., "gpt-4o-mini", "gpt-4o")
api_key: Provider API key (defaults to LLM_API_KEY/OPENAI_API_KEY env vars)
api_key: Provider API key
base_url: Custom API base URL (for Azure or compatible APIs)
system_prompt: Default system prompt for conversations
"""
super().__init__(model=model)
self.api_key = api_key or os.getenv("LLM_API_KEY") or os.getenv("OPENAI_API_KEY")
self.api_key = api_key
self.base_url = base_url or os.getenv("LLM_API_URL") or os.getenv("OPENAI_API_URL")
self.system_prompt = system_prompt or (
"You are a helpful, friendly voice assistant. "

View File

@@ -75,7 +75,7 @@ class OpenAICompatibleASRService(BaseASRService):
if not AIOHTTP_AVAILABLE:
raise RuntimeError("aiohttp is required for OpenAICompatibleASRService")
self.api_key = api_key or os.getenv("ASR_API_KEY") or os.getenv("SILICONFLOW_API_KEY")
self.api_key = api_key
raw_api_url = api_url or os.getenv("ASR_API_URL") or self.API_URL
self.api_url = self._resolve_transcriptions_endpoint(raw_api_url)
self.model = self.MODELS.get(model.lower(), model)

View File

@@ -49,7 +49,7 @@ class OpenAICompatibleTTSService(BaseTTSService):
Initialize OpenAI-compatible TTS service.
Args:
api_key: Provider API key (defaults to TTS_API_KEY/SILICONFLOW_API_KEY env vars)
api_key: Provider API key
api_url: Provider API URL (defaults to SiliconFlow endpoint)
voice: Voice name (alex, anna, bella, benjamin, charles, claire, david, diana)
model: Model name
@@ -73,7 +73,7 @@ class OpenAICompatibleTTSService(BaseTTSService):
super().__init__(voice=full_voice, sample_rate=sample_rate, speed=speed)
self.api_key = api_key or os.getenv("TTS_API_KEY") or os.getenv("SILICONFLOW_API_KEY")
self.api_key = api_key
self.model = model
raw_api_url = api_url or os.getenv("TTS_API_URL") or "https://api.siliconflow.cn/v1/audio/speech"
self.api_url = self._resolve_speech_endpoint(raw_api_url)

View File

@@ -13,7 +13,6 @@ The Realtime API provides:
- Barge-in/interruption handling
"""
import os
import asyncio
import json
import base64
@@ -98,7 +97,6 @@ class RealtimeService:
config: Realtime configuration (uses defaults if not provided)
"""
self.config = config or RealtimeConfig()
self.config.api_key = self.config.api_key or os.getenv("OPENAI_API_KEY")
self.state = RealtimeState.DISCONNECTED
self._ws = None