diff --git a/web/pages/Assistants.tsx b/web/pages/Assistants.tsx index 5529d68..860a0c9 100644 --- a/web/pages/Assistants.tsx +++ b/web/pages/Assistants.tsx @@ -3,7 +3,7 @@ import React, { useState, useEffect, useMemo, useRef } from 'react'; import { Plus, Search, Play, Copy, Trash2, Mic, MessageSquare, Save, Video, PhoneOff, Camera, ArrowLeftRight, Send, Phone, Rocket, AlertTriangle, PhoneCall, CameraOff, Image, Images, CloudSun, Calendar, TrendingUp, Coins, Wrench, Globe, Terminal, X, ClipboardCheck, Sparkles, Volume2, Timer, ChevronDown, Database, Server, Zap, ExternalLink, Key, BrainCircuit, Ear, Book, Filter } from 'lucide-react'; import { Button, Input, Badge, Drawer, Dialog } from '../components/UI'; import { ASRModel, Assistant, KnowledgeBase, LLMModel, TabValue, Tool, Voice } from '../types'; -import { createAssistant, deleteAssistant, fetchASRModels, fetchAssistants, fetchKnowledgeBases, fetchLLMModels, fetchTools, fetchVoices, generateAssistantOpenerAudio, updateAssistant as updateAssistantApi } from '../services/backendApi'; +import { createAssistant, deleteAssistant, fetchASRModels, fetchAssistantOpenerAudioPcmBuffer, fetchAssistants, fetchKnowledgeBases, fetchLLMModels, fetchTools, fetchVoices, generateAssistantOpenerAudio, updateAssistant as updateAssistantApi } from '../services/backendApi'; const isOpenAICompatibleVendor = (vendor?: string) => { const normalized = String(vendor || '').trim().toLowerCase(); @@ -109,6 +109,9 @@ export const AssistantsPage: React.FC = () => { const [persistedAssistantSnapshotById, setPersistedAssistantSnapshotById] = useState<Record<string, string>>({}); const [unsavedDebugConfirmOpen, setUnsavedDebugConfirmOpen] = useState(false); const [openerAudioGenerating, setOpenerAudioGenerating] = useState(false); + const [openerAudioPreviewing, setOpenerAudioPreviewing] = useState(false); + const openerPreviewAudioCtxRef = useRef<AudioContext | null>(null); + const openerPreviewSourceRef = useRef<AudioBufferSourceNode | null>(null); const selectedAssistant = assistants.find(a => a.id === 
selectedId) || null; const serializeAssistant = (assistant: Assistant) => JSON.stringify(assistant); @@ -296,6 +299,71 @@ export const AssistantsPage: React.FC = () => { } }; + const stopOpenerAudioPreview = () => { + if (openerPreviewSourceRef.current) { + try { + openerPreviewSourceRef.current.stop(); + } catch { + // no-op + } + try { + openerPreviewSourceRef.current.disconnect(); + } catch { + // no-op + } + openerPreviewSourceRef.current = null; + } + setOpenerAudioPreviewing(false); + }; + + const handlePreviewOpenerAudio = async () => { + if (!selectedAssistant?.id || !selectedAssistant.openerAudioReady) return; + try { + stopOpenerAudioPreview(); + const pcmBuffer = await fetchAssistantOpenerAudioPcmBuffer(selectedAssistant.id); + const int16 = new Int16Array(pcmBuffer); + if (int16.length === 0) return; + + let ctx = openerPreviewAudioCtxRef.current; + if (!ctx) { + ctx = new AudioContext(); + openerPreviewAudioCtxRef.current = ctx; + } + if (ctx.state === 'suspended') { + await ctx.resume(); + } + + const float32 = new Float32Array(int16.length); + for (let i = 0; i < int16.length; i += 1) { + float32[i] = int16[i] / 32768; + } + const audioBuffer = ctx.createBuffer(1, float32.length, 16000); + audioBuffer.copyToChannel(float32, 0); + + const source = ctx.createBufferSource(); + source.buffer = audioBuffer; + source.connect(ctx.destination); + source.onended = () => { + if (openerPreviewSourceRef.current === source) { + openerPreviewSourceRef.current = null; + setOpenerAudioPreviewing(false); + } + try { + source.disconnect(); + } catch { + // no-op + } + }; + openerPreviewSourceRef.current = source; + setOpenerAudioPreviewing(true); + source.start(); + } catch (error) { + console.error(error); + setOpenerAudioPreviewing(false); + alert((error as Error)?.message || '预览预加载开场音频失败'); + } + }; + const handleConfirmOpenDebug = () => { setUnsavedDebugConfirmOpen(false); setDebugOpen(true); @@ -342,6 +410,23 @@ export const AssistantsPage: React.FC = () => { 
const canAdjustInterruptionSensitivity = selectedAssistant?.botCannotBeInterrupted !== true; const isBotFirstTurn = selectedAssistant?.firstTurnMode !== 'user_first'; + useEffect(() => { + return () => { + if (openerPreviewSourceRef.current) { + try { + openerPreviewSourceRef.current.stop(); + } catch { + // no-op + } + openerPreviewSourceRef.current = null; + } + if (openerPreviewAudioCtxRef.current) { + void openerPreviewAudioCtxRef.current.close(); + openerPreviewAudioCtxRef.current = null; + } + }; + }, []); + return (
{/* LEFT COLUMN: List */} @@ -742,14 +827,29 @@ export const AssistantsPage: React.FC = () => { ? `已生成 (${Math.round((selectedAssistant.openerAudioDurationMs || 0) / 1000)}s)` : '未生成'}

- +
+ + {openerAudioPreviewing && ( + + )} + +

使用当前 TTS 配置生成并保存到后端;引擎可直接播放以降低首包延迟。 diff --git a/web/services/backendApi.ts b/web/services/backendApi.ts index 8404e9b..c248269 100644 --- a/web/services/backendApi.ts +++ b/web/services/backendApi.ts @@ -331,6 +331,15 @@ export const generateAssistantOpenerAudio = async ( }); }; + +export const fetchAssistantOpenerAudioPcmBuffer = async (assistantId: string): Promise<ArrayBuffer> => { + const url = `${getApiBaseUrl()}/assistants/${assistantId}/opener-audio/pcm`; + const response = await fetch(url, { method: 'GET' }); + if (!response.ok) { + throw new Error(`Failed to fetch opener audio: ${response.status}`); + } + return response.arrayBuffer(); +}; + export const fetchVoices = async (): Promise<Voice[]> => { const response = await apiRequest<{ list?: AnyRecord[] } | AnyRecord[]>(withLimit('/voices')); const list = Array.isArray(response) ? response : (response.list || []);