Unify db api

This commit is contained in:
Xin Wang
2026-02-26 01:58:39 +08:00
parent 56f8aa2191
commit 72ed7d0512
40 changed files with 3926 additions and 593 deletions

8
api/.gitignore vendored
View File

@@ -36,8 +36,12 @@ env/
*.sqlite
*.sqlite3
# Vector store data
data/vector_store/
# Runtime data (SQLite, vector store, uploads, generated artifacts)
data/**
!data/
!data/.gitkeep
!data/vector_store/
data/vector_store/**
!data/vector_store/.gitkeep
# IDE

View File

@@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import Optional
from typing import Any, Dict, List, Optional
import uuid
from datetime import datetime
from ..db import get_db
from ..models import Assistant, LLMModel, ASRModel, Voice
from ..schemas import (
AssistantCreate, AssistantUpdate, AssistantOut
AssistantCreate, AssistantUpdate, AssistantOut, AssistantEngineConfigResponse
)
router = APIRouter(prefix="/assistants", tags=["Assistants"])
@@ -52,8 +52,13 @@ def _normalize_openai_compatible_voice_key(voice_value: str, model: str) -> str:
return f"{model_name}:{voice_id}"
def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
metadata = {
def _config_version_id(assistant: Assistant) -> str:
    """Build a deterministic config-version id from the assistant's last change.

    Uses ``updated_at``, falling back to ``created_at``, then to the current
    time so the id is always well-formed even for rows missing timestamps.
    Format: ``asst_<id>_<YYYYMMDDHHMMSS>``.
    """
    # Local import keeps the file-level import block untouched.
    from datetime import timezone

    # datetime.utcnow() is deprecated since Python 3.12; an aware UTC "now"
    # produces the identical strftime output.
    updated = assistant.updated_at or assistant.created_at or datetime.now(timezone.utc)
    return f"asst_{assistant.id}_{updated.strftime('%Y%m%d%H%M%S')}"
def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> tuple[Dict[str, Any], List[str]]:
metadata: Dict[str, Any] = {
"systemPrompt": assistant.prompt or "",
"firstTurnMode": assistant.first_turn_mode or "bot_first",
"greeting": assistant.opener or "",
@@ -64,10 +69,29 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"minDurationMs": int(assistant.interruption_sensitivity or 500),
},
"services": {},
"tools": assistant.tools or [],
"history": {
"assistantId": assistant.id,
"userId": int(assistant.user_id or 1),
"source": "debug",
},
}
warnings = []
warnings: List[str] = []
if assistant.llm_model_id:
config_mode = str(assistant.config_mode or "platform").strip().lower()
if config_mode in {"dify", "fastgpt"}:
metadata["services"]["llm"] = {
"provider": "openai",
"model": "",
"apiKey": assistant.api_key,
"baseUrl": assistant.api_url,
}
if not (assistant.api_url or "").strip():
warnings.append(f"External LLM API URL is empty for mode: {assistant.config_mode}")
if not (assistant.api_key or "").strip():
warnings.append(f"External LLM API key is empty for mode: {assistant.config_mode}")
elif assistant.llm_model_id:
llm = db.query(LLMModel).filter(LLMModel.id == assistant.llm_model_id).first()
if llm:
metadata["services"]["llm"] = {
@@ -87,6 +111,7 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"provider": asr_provider,
"model": asr.model_name or asr.name,
"apiKey": asr.api_key if asr_provider == "openai_compatible" else None,
"baseUrl": asr.base_url if asr_provider == "openai_compatible" else None,
}
else:
warnings.append(f"ASR model not found: {assistant.asr_model_id}")
@@ -107,6 +132,7 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"provider": tts_provider,
"model": model,
"apiKey": voice.api_key if tts_provider == "openai_compatible" else None,
"baseUrl": voice.base_url if tts_provider == "openai_compatible" else None,
"voice": runtime_voice,
"speed": assistant.speed or voice.speed,
}
@@ -126,10 +152,21 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> dict:
"kbId": assistant.knowledge_base_id,
"nResults": 5,
}
return metadata, warnings
def _build_engine_assistant_config(db: Session, assistant: Assistant) -> Dict[str, Any]:
session_metadata, warnings = _resolve_runtime_metadata(db, assistant)
config_version_id = _config_version_id(assistant)
assistant_cfg = dict(session_metadata)
assistant_cfg["assistantId"] = assistant.id
assistant_cfg["configVersionId"] = config_version_id
return {
"assistantId": assistant.id,
"sessionStartMetadata": metadata,
"configVersionId": config_version_id,
"assistant": assistant_cfg,
"sessionStartMetadata": session_metadata,
"sources": {
"llmModelId": assistant.llm_model_id,
"asrModelId": assistant.asr_model_id,
@@ -219,13 +256,22 @@ def get_assistant(id: str, db: Session = Depends(get_db)):
return assistant_to_dict(assistant)
@router.get("/{id}/runtime-config")
def get_assistant_runtime_config(id: str, db: Session = Depends(get_db)):
"""Resolve assistant runtime config for engine WS session.start metadata."""
@router.get("/{id}/config", response_model=AssistantEngineConfigResponse)
def get_assistant_config(id: str, db: Session = Depends(get_db)):
"""Canonical engine config endpoint consumed by engine backend adapter."""
assistant = db.query(Assistant).filter(Assistant.id == id).first()
if not assistant:
raise HTTPException(status_code=404, detail="Assistant not found")
return _resolve_runtime_metadata(db, assistant)
return _build_engine_assistant_config(db, assistant)
@router.get("/{id}/runtime-config", response_model=AssistantEngineConfigResponse)
def get_assistant_runtime_config(id: str, db: Session = Depends(get_db)):
    """Legacy alias for resolved engine runtime config."""
    # Same lookup + 404 contract as the canonical /config endpoint.
    if (assistant := db.query(Assistant).filter(Assistant.id == id).first()) is None:
        raise HTTPException(status_code=404, detail="Assistant not found")
    return _build_engine_assistant_config(db, assistant)
@router.post("", response_model=AssistantOut)

View File

@@ -333,6 +333,35 @@ class AssistantOut(AssistantBase):
from_attributes = True
class AssistantRuntimeMetadata(BaseModel):
    """Canonical runtime metadata payload consumed by engine session.start."""
    # extra="allow" lets unknown keys round-trip so engine/backend versions
    # can evolve independently of this schema.
    model_config = ConfigDict(extra="allow")
    # Core conversation behaviour.
    systemPrompt: str = ""
    firstTurnMode: str = "bot_first"  # default mirrors the resolver's fallback
    greeting: str = ""
    generatedOpenerEnabled: bool = False
    # Free-form sections; their internal schema is owned by the engine side.
    output: Dict[str, Any] = Field(default_factory=dict)
    bargeIn: Dict[str, Any] = Field(default_factory=dict)
    services: Dict[str, Dict[str, Any]] = Field(default_factory=dict)  # llm/asr/tts entries
    tools: List[Any] = Field(default_factory=list)
    knowledgeBaseId: Optional[str] = None
    knowledge: Dict[str, Any] = Field(default_factory=dict)
    history: Dict[str, Any] = Field(default_factory=dict)  # assistantId/userId/source
    # NOTE(review): these two appear to be set only on the top-level
    # "assistant" copy of this payload, not on sessionStartMetadata — confirm.
    assistantId: Optional[str] = None
    configVersionId: Optional[str] = None
class AssistantEngineConfigResponse(BaseModel):
    """Envelope returned by /assistants/{id}/config and its legacy
    /runtime-config alias, consumed by the engine backend adapter."""
    assistantId: str
    configVersionId: Optional[str] = None
    # Resolved metadata appears twice: "assistant" carries the id/version
    # fields, "sessionStartMetadata" is the raw session.start payload.
    assistant: AssistantRuntimeMetadata
    sessionStartMetadata: AssistantRuntimeMetadata
    # Ids of the DB rows the config was resolved from (e.g. llmModelId, asrModelId).
    sources: Dict[str, Optional[str]] = Field(default_factory=dict)
    # Human-readable resolution problems (missing models, empty keys/URLs).
    warnings: List[str] = Field(default_factory=list)
class AssistantStats(BaseModel):
assistant_id: str
total_calls: int = 0

View File

@@ -183,12 +183,16 @@ class TestAssistantAPI:
def test_get_runtime_config(self, client, sample_assistant_data, sample_llm_model_data, sample_asr_model_data, sample_voice_data):
"""Test resolved runtime config endpoint for WS session.start metadata."""
sample_asr_model_data["vendor"] = "OpenAI Compatible"
llm_resp = client.post("/api/llm", json=sample_llm_model_data)
assert llm_resp.status_code == 200
asr_resp = client.post("/api/asr", json=sample_asr_model_data)
assert asr_resp.status_code == 200
sample_voice_data["vendor"] = "OpenAI Compatible"
sample_voice_data["base_url"] = "https://tts.example.com/v1/audio/speech"
sample_voice_data["api_key"] = "test-voice-key"
voice_resp = client.post("/api/voices", json=sample_voice_data)
assert voice_resp.status_code == 200
voice_id = voice_resp.json()["id"]
@@ -215,7 +219,26 @@ class TestAssistantAPI:
assert metadata["greeting"] == "runtime opener"
assert metadata["services"]["llm"]["model"] == sample_llm_model_data["model_name"]
assert metadata["services"]["asr"]["model"] == sample_asr_model_data["model_name"]
assert metadata["services"]["asr"]["baseUrl"] == sample_asr_model_data["base_url"]
assert metadata["services"]["tts"]["voice"] == sample_voice_data["voice_key"]
assert metadata["services"]["tts"]["baseUrl"] == sample_voice_data["base_url"]
def test_get_engine_config_endpoint(self, client, sample_assistant_data):
    """Test canonical assistant config endpoint consumed by engine backend adapter."""
    # Create an assistant, then fetch its resolved engine config.
    created = client.post("/api/assistants", json=sample_assistant_data)
    assert created.status_code == 200
    assistant_id = created.json()["id"]

    response = client.get(f"/api/assistants/{assistant_id}/config")
    assert response.status_code == 200
    body = response.json()

    # Envelope identity and versioning.
    assert body["assistantId"] == assistant_id
    assert body["assistant"]["assistantId"] == assistant_id
    assert body["assistant"]["configVersionId"].startswith(f"asst_{assistant_id}_")
    # Resolved prompt is present in both payload copies.
    assert body["assistant"]["systemPrompt"] == sample_assistant_data["prompt"]
    assert body["sessionStartMetadata"]["systemPrompt"] == sample_assistant_data["prompt"]
    assert body["sessionStartMetadata"]["history"]["assistantId"] == assistant_id
def test_runtime_config_text_mode_when_voice_output_disabled(self, client, sample_assistant_data):
sample_assistant_data["voiceOutputEnabled"] = False