Add audio preview functionality for assistant opener audio in AssistantsPage. Implement controls for previewing and stopping audio playback, and integrate new API endpoint for fetching PCM buffer. Enhance user interface with updated button states for audio actions.

This commit is contained in:
Xin Wang
2026-02-26 16:15:31 +08:00
parent fb95e2abe2
commit 0de6fe529e
2 changed files with 118 additions and 9 deletions

View File

@@ -3,7 +3,7 @@ import React, { useState, useEffect, useMemo, useRef } from 'react';
import { Plus, Search, Play, Copy, Trash2, Mic, MessageSquare, Save, Video, PhoneOff, Camera, ArrowLeftRight, Send, Phone, Rocket, AlertTriangle, PhoneCall, CameraOff, Image, Images, CloudSun, Calendar, TrendingUp, Coins, Wrench, Globe, Terminal, X, ClipboardCheck, Sparkles, Volume2, Timer, ChevronDown, Database, Server, Zap, ExternalLink, Key, BrainCircuit, Ear, Book, Filter } from 'lucide-react';
import { Button, Input, Badge, Drawer, Dialog } from '../components/UI';
import { ASRModel, Assistant, KnowledgeBase, LLMModel, TabValue, Tool, Voice } from '../types';
import { createAssistant, deleteAssistant, fetchASRModels, fetchAssistants, fetchKnowledgeBases, fetchLLMModels, fetchTools, fetchVoices, generateAssistantOpenerAudio, updateAssistant as updateAssistantApi } from '../services/backendApi';
import { createAssistant, deleteAssistant, fetchASRModels, fetchAssistantOpenerAudioPcmBuffer, fetchAssistants, fetchKnowledgeBases, fetchLLMModels, fetchTools, fetchVoices, generateAssistantOpenerAudio, updateAssistant as updateAssistantApi } from '../services/backendApi';
const isOpenAICompatibleVendor = (vendor?: string) => {
const normalized = String(vendor || '').trim().toLowerCase();
@@ -109,6 +109,9 @@ export const AssistantsPage: React.FC = () => {
const [persistedAssistantSnapshotById, setPersistedAssistantSnapshotById] = useState<Record<string, string>>({});
const [unsavedDebugConfirmOpen, setUnsavedDebugConfirmOpen] = useState(false);
const [openerAudioGenerating, setOpenerAudioGenerating] = useState(false);
const [openerAudioPreviewing, setOpenerAudioPreviewing] = useState(false);
const openerPreviewAudioCtxRef = useRef<AudioContext | null>(null);
const openerPreviewSourceRef = useRef<AudioBufferSourceNode | null>(null);
const selectedAssistant = assistants.find(a => a.id === selectedId) || null;
const serializeAssistant = (assistant: Assistant) => JSON.stringify(assistant);
@@ -296,6 +299,71 @@ export const AssistantsPage: React.FC = () => {
}
};
// Halts any in-flight opener audio preview and resets the previewing flag.
const stopOpenerAudioPreview = () => {
  const source = openerPreviewSourceRef.current;
  if (source) {
    try {
      source.stop();
    } catch {
      // no-op: stop() throws if the source never started or already ended
    }
    try {
      source.disconnect();
    } catch {
      // no-op: disconnect() may throw if the node is already detached
    }
    openerPreviewSourceRef.current = null;
  }
  setOpenerAudioPreviewing(false);
};
// Fetches the assistant's pre-generated opener audio (mono 16-bit PCM,
// decoded at 16 kHz below — TODO confirm the backend's actual sample rate)
// and plays it through the Web Audio API. No-op unless an assistant is
// selected and its opener audio is marked ready.
const handlePreviewOpenerAudio = async () => {
  if (!selectedAssistant?.id || !selectedAssistant.openerAudioReady) return;
  try {
    // Stop any preview that is already playing before starting a new one.
    stopOpenerAudioPreview();
    const pcmBuffer = await fetchAssistantOpenerAudioPcmBuffer(selectedAssistant.id);
    const int16 = new Int16Array(pcmBuffer);
    if (int16.length === 0) return;
    // Lazily create a single shared AudioContext and reuse it across previews.
    let ctx = openerPreviewAudioCtxRef.current;
    if (!ctx) {
      ctx = new AudioContext();
      openerPreviewAudioCtxRef.current = ctx;
    }
    if (ctx.state === 'suspended') {
      await ctx.resume();
    }
    // Convert signed 16-bit PCM to the [-1, 1) float range Web Audio expects.
    const float32 = new Float32Array(int16.length);
    for (let i = 0; i < int16.length; i += 1) {
      float32[i] = int16[i] / 32768;
    }
    const audioBuffer = ctx.createBuffer(1, float32.length, 16000);
    audioBuffer.copyToChannel(float32, 0);
    const source = ctx.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(ctx.destination);
    source.onended = () => {
      // Only clear preview state if this source is still the active one;
      // a newer preview may have replaced it.
      if (openerPreviewSourceRef.current === source) {
        openerPreviewSourceRef.current = null;
        setOpenerAudioPreviewing(false);
      }
      try {
        source.disconnect();
      } catch {
        // no-op
      }
    };
    // Fix for overlapping playback: a second click while the fetch/resume
    // above was awaiting could have started another source in the meantime.
    // Stop it before registering and starting this one.
    stopOpenerAudioPreview();
    openerPreviewSourceRef.current = source;
    setOpenerAudioPreviewing(true);
    source.start();
  } catch (error) {
    console.error(error);
    setOpenerAudioPreviewing(false);
    // Narrow the unknown catch value instead of an unchecked `as Error` cast.
    alert(error instanceof Error && error.message ? error.message : '预览预加载开场音频失败');
  }
};
const handleConfirmOpenDebug = () => {
setUnsavedDebugConfirmOpen(false);
setDebugOpen(true);
@@ -342,6 +410,23 @@ export const AssistantsPage: React.FC = () => {
const canAdjustInterruptionSensitivity = selectedAssistant?.botCannotBeInterrupted !== true;
const isBotFirstTurn = selectedAssistant?.firstTurnMode !== 'user_first';
// On unmount, tear down any active preview playback and release the
// AudioContext. Intentionally does not call setOpenerAudioPreviewing here —
// setting state on an unmounted component would be a React error.
useEffect(() => {
  return () => {
    const source = openerPreviewSourceRef.current;
    if (source) {
      try {
        source.stop();
      } catch {
        // no-op: stop() throws if playback never started
      }
      openerPreviewSourceRef.current = null;
    }
    const ctx = openerPreviewAudioCtxRef.current;
    if (ctx) {
      // close() returns a promise; fire-and-forget is fine during teardown.
      void ctx.close();
      openerPreviewAudioCtxRef.current = null;
    }
  };
}, []);
return (
<div className="flex h-full min-h-0 gap-6 animate-in fade-in">
{/* LEFT COLUMN: List */}
@@ -742,14 +827,29 @@ export const AssistantsPage: React.FC = () => {
? `已生成 (${Math.round((selectedAssistant.openerAudioDurationMs || 0) / 1000)}s)`
: '未生成'}
</p>
<Button
variant="secondary"
size="sm"
onClick={handleGenerateOpenerAudio}
disabled={openerAudioGenerating || selectedAssistant.generatedOpenerEnabled === true}
>
{openerAudioGenerating ? '生成中...' : '生成开场预加载音频'}
</Button>
<div className="flex items-center gap-2">
<Button
variant="secondary"
size="sm"
onClick={handlePreviewOpenerAudio}
disabled={!selectedAssistant.openerAudioReady || openerAudioGenerating}
>
{openerAudioPreviewing ? '播放中...' : '预览预加载音频'}
</Button>
{openerAudioPreviewing && (
<Button variant="ghost" size="sm" onClick={stopOpenerAudioPreview}>
</Button>
)}
<Button
variant="secondary"
size="sm"
onClick={handleGenerateOpenerAudio}
disabled={openerAudioGenerating || selectedAssistant.generatedOpenerEnabled === true}
>
{openerAudioGenerating ? '生成中...' : '生成开场预加载音频'}
</Button>
</div>
</div>
<p className="text-[11px] text-muted-foreground">
使 TTS