Unify DB API

This commit is contained in:
Xin Wang
2026-02-26 01:58:39 +08:00
parent 56f8aa2191
commit 72ed7d0512
40 changed files with 3926 additions and 593 deletions

View File

@@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import Optional
from typing import Any, Dict, List, Optional
import uuid
from datetime import datetime
from ..db import get_db
from ..models import Assistant, LLMModel, ASRModel, Voice
from ..schemas import (
AssistantCreate, AssistantUpdate, AssistantOut
AssistantCreate, AssistantUpdate, AssistantOut, AssistantEngineConfigResponse
)
router = APIRouter(prefix="/assistants", tags=["Assistants"])
@@ -52,8 +52,13 @@ def _normalize_openai_compatible_voice_key(voice_value: str, model: str) -> str:
return f"{model_name}:{voice_id}"
def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
metadata = {
def _config_version_id(assistant: Assistant) -> str:
updated = assistant.updated_at or assistant.created_at or datetime.utcnow()
return f"asst_{assistant.id}_{updated.strftime('%Y%m%d%H%M%S')}"
def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> tuple[Dict[str, Any], List[str]]:
metadata: Dict[str, Any] = {
"systemPrompt": assistant.prompt or "",
"firstTurnMode": assistant.first_turn_mode or "bot_first",
"greeting": assistant.opener or "",
@@ -64,10 +69,29 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"minDurationMs": int(assistant.interruption_sensitivity or 500),
},
"services": {},
"tools": assistant.tools or [],
"history": {
"assistantId": assistant.id,
"userId": int(assistant.user_id or 1),
"source": "debug",
},
}
warnings = []
warnings: List[str] = []
if assistant.llm_model_id:
config_mode = str(assistant.config_mode or "platform").strip().lower()
if config_mode in {"dify", "fastgpt"}:
metadata["services"]["llm"] = {
"provider": "openai",
"model": "",
"apiKey": assistant.api_key,
"baseUrl": assistant.api_url,
}
if not (assistant.api_url or "").strip():
warnings.append(f"External LLM API URL is empty for mode: {assistant.config_mode}")
if not (assistant.api_key or "").strip():
warnings.append(f"External LLM API key is empty for mode: {assistant.config_mode}")
elif assistant.llm_model_id:
llm = db.query(LLMModel).filter(LLMModel.id == assistant.llm_model_id).first()
if llm:
metadata["services"]["llm"] = {
@@ -87,6 +111,7 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"provider": asr_provider,
"model": asr.model_name or asr.name,
"apiKey": asr.api_key if asr_provider == "openai_compatible" else None,
"baseUrl": asr.base_url if asr_provider == "openai_compatible" else None,
}
else:
warnings.append(f"ASR model not found: {assistant.asr_model_id}")
@@ -107,6 +132,7 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"provider": tts_provider,
"model": model,
"apiKey": voice.api_key if tts_provider == "openai_compatible" else None,
"baseUrl": voice.base_url if tts_provider == "openai_compatible" else None,
"voice": runtime_voice,
"speed": assistant.speed or voice.speed,
}
@@ -126,10 +152,21 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"kbId": assistant.knowledge_base_id,
"nResults": 5,
}
return metadata, warnings
def _build_engine_assistant_config(db: Session, assistant: Assistant) -> Dict[str, Any]:
session_metadata, warnings = _resolve_runtime_metadata(db, assistant)
config_version_id = _config_version_id(assistant)
assistant_cfg = dict(session_metadata)
assistant_cfg["assistantId"] = assistant.id
assistant_cfg["configVersionId"] = config_version_id
return {
"assistantId": assistant.id,
"sessionStartMetadata": metadata,
"configVersionId": config_version_id,
"assistant": assistant_cfg,
"sessionStartMetadata": session_metadata,
"sources": {
"llmModelId": assistant.llm_model_id,
"asrModelId": assistant.asr_model_id,
@@ -219,13 +256,22 @@ def get_assistant(id: str, db: Session = Depends(get_db)):
return assistant_to_dict(assistant)
@router.get("/{id}/runtime-config")
def get_assistant_runtime_config(id: str, db: Session = Depends(get_db)):
"""Resolve assistant runtime config for engine WS session.start metadata."""
@router.get("/{id}/config", response_model=AssistantEngineConfigResponse)
def get_assistant_config(id: str, db: Session = Depends(get_db)):
    """Canonical engine config endpoint consumed by the engine backend adapter.

    Args:
        id: Assistant primary key from the URL path.
        db: SQLAlchemy session injected by FastAPI.

    Raises:
        HTTPException: 404 when no assistant with ``id`` exists.
    """
    assistant = db.query(Assistant).filter(Assistant.id == id).first()
    if not assistant:
        raise HTTPException(status_code=404, detail="Assistant not found")
    # Single return path: the resolved engine config (removes the stale
    # duplicate/unreachable return left over from the old implementation).
    return _build_engine_assistant_config(db, assistant)
@router.get("/{id}/runtime-config", response_model=AssistantEngineConfigResponse)
def get_assistant_runtime_config(id: str, db: Session = Depends(get_db)):
    """Legacy alias route; returns the same resolved engine config as /config."""
    record = db.query(Assistant).filter(Assistant.id == id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Assistant not found")
    return _build_engine_assistant_config(db, record)
@router.post("", response_model=AssistantOut)