Implement WS v1 protocol and runtime-config powered debug drawer
This commit is contained in:
@@ -5,7 +5,7 @@ import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from ..db import get_db
|
||||
from ..models import Assistant
|
||||
from ..models import Assistant, LLMModel, ASRModel, Voice
|
||||
from ..schemas import (
|
||||
AssistantCreate, AssistantUpdate, AssistantOut
|
||||
)
|
||||
@@ -13,6 +13,73 @@ from ..schemas import (
|
||||
router = APIRouter(prefix="/assistants", tags=["Assistants"])
|
||||
|
||||
|
||||
def _is_siliconflow_vendor(vendor: Optional[str]) -> bool:
|
||||
return (vendor or "").strip().lower() in {"siliconflow", "硅基流动"}
|
||||
|
||||
|
||||
def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
    """Resolve an assistant's configured models into session.start metadata.

    Looks up the LLM, ASR and voice rows referenced by *assistant* and folds
    them into a per-service config dict. Missing references never raise —
    they are reported through the returned "warnings" list so the caller can
    still start a (degraded) session.
    """
    warnings = []
    services = {}

    # LLM: resolved rows always map to the OpenAI-compatible provider.
    if assistant.llm_model_id:
        llm = db.query(LLMModel).filter(LLMModel.id == assistant.llm_model_id).first()
        if llm is None:
            warnings.append(f"LLM model not found: {assistant.llm_model_id}")
        else:
            services["llm"] = {
                "provider": "openai",
                "model": llm.model_name or llm.name,
                "apiKey": llm.api_key,
                "baseUrl": llm.base_url,
            }

    # ASR: SiliconFlow vendors get a cloud key; everything else runs buffered
    # locally and must not leak an API key.
    if assistant.asr_model_id:
        asr = db.query(ASRModel).filter(ASRModel.id == assistant.asr_model_id).first()
        if asr is None:
            warnings.append(f"ASR model not found: {assistant.asr_model_id}")
        else:
            asr_provider = "siliconflow" if _is_siliconflow_vendor(asr.vendor) else "buffered"
            services["asr"] = {
                "provider": asr_provider,
                "model": asr.model_name or asr.name,
                "apiKey": asr.api_key if asr_provider == "siliconflow" else None,
            }

    # TTS: assistant.voice is either a Voice row id or, as a fallback when no
    # row matches, a direct voice identifier passed through verbatim.
    if assistant.voice:
        voice = db.query(Voice).filter(Voice.id == assistant.voice).first()
        if voice is None:
            # Keep assistant.voice as direct voice identifier fallback
            services["tts"] = {
                "voice": assistant.voice,
                "speed": assistant.speed or 1.0,
            }
            warnings.append(f"Voice resource not found: {assistant.voice}")
        else:
            tts_provider = "siliconflow" if _is_siliconflow_vendor(voice.vendor) else "edge"
            services["tts"] = {
                "provider": tts_provider,
                "model": voice.model,
                "apiKey": voice.api_key if tts_provider == "siliconflow" else None,
                "voice": voice.voice_key or voice.id,
                # Assistant-level speed overrides the voice default when set.
                "speed": assistant.speed or voice.speed,
            }

    return {
        "assistantId": assistant.id,
        "sessionStartMetadata": {
            "systemPrompt": assistant.prompt or "",
            "greeting": assistant.opener or "",
            "services": services,
        },
        # Raw source ids, so the debug drawer can show what was resolved.
        "sources": {
            "llmModelId": assistant.llm_model_id,
            "asrModelId": assistant.asr_model_id,
            "voiceId": assistant.voice,
        },
        "warnings": warnings,
    }
|
||||
|
||||
|
||||
def assistant_to_dict(assistant: Assistant) -> dict:
|
||||
return {
|
||||
"id": assistant.id,
|
||||
@@ -84,6 +151,15 @@ def get_assistant(id: str, db: Session = Depends(get_db)):
|
||||
return assistant_to_dict(assistant)
|
||||
|
||||
|
||||
@router.get("/{id}/runtime-config")
def get_assistant_runtime_config(id: str, db: Session = Depends(get_db)):
    """Return the resolved runtime config the engine uses for WS session.start metadata."""
    assistant = db.query(Assistant).filter(Assistant.id == id).first()
    if assistant is None:
        raise HTTPException(status_code=404, detail="Assistant not found")
    return _resolve_runtime_metadata(db, assistant)
|
||||
|
||||
|
||||
@router.post("", response_model=AssistantOut)
|
||||
def create_assistant(data: AssistantCreate, db: Session = Depends(get_db)):
|
||||
"""创建新助手"""
|
||||
@@ -139,4 +215,3 @@ def delete_assistant(id: str, db: Session = Depends(get_db)):
|
||||
db.delete(assistant)
|
||||
db.commit()
|
||||
return {"message": "Deleted successfully"}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user