"use client";
import { LoadingSVG } from "@/components/button/LoadingSVG";
import { ChatMessageType } from "@/components/chat/ChatTile";
import { ColorPicker } from "@/components/colorPicker/ColorPicker";
import { AudioInputTile } from "@/components/config/AudioInputTile";
import { ConfigurationPanelItem } from "@/components/config/ConfigurationPanelItem";
import { NameValueRow } from "@/components/config/NameValueRow";
import { PhoneSimulator } from "@/components/playground/PhoneSimulator";
import { PlaygroundHeader } from "@/components/playground/PlaygroundHeader";
import {
  PlaygroundTab,
  PlaygroundTabbedTile,
  PlaygroundTile,
} from "@/components/playground/PlaygroundTile";
import { useConfig } from "@/hooks/useConfig";
import { TranscriptionTile } from "@/transcriptions/TranscriptionTile";
import {
  BarVisualizer,
  VideoTrack,
  useConnectionState,
  useDataChannel,
  useLocalParticipant,
  useRoomInfo,
  useTracks,
  useVoiceAssistant,
  useRoomContext,
  useParticipantAttributes,
  useChat,
} from "@livekit/components-react";
import { ConnectionState, LocalParticipant, Track, RpcError, RpcInvocationData } from "livekit-client";
import { QRCodeSVG } from "qrcode.react";
import { ReactNode, useCallback, useEffect, useMemo, useRef, useState } from "react";
import tailwindTheme from "../../lib/tailwindTheme.preval";
import { EditableNameValueRow } from "@/components/config/NameValueRow";
import { AttributesInspector } from "@/components/config/AttributesInspector";
import { RpcPanel } from "./RpcPanel";

// A single name/value pair displayed in the playground UI.
export interface PlaygroundMeta { name: string; value: string; }

// Props for the Playground component.
export interface PlaygroundProps {
  logo?: ReactNode;
  themeColors: string[];
  // Called with `true`/`false` to connect/disconnect; on connect the caller
  // may receive a LiveKit token and server URL via `opts`.
  onConnect: (connect: boolean, opts?: { token: string; url: string }) => void;
}

// Fixed header height in pixels, shared by the layout below.
const headerHeight = 56;

/**
 * Playground — the main LiveKit agent playground view.
 *
 * Wires up the room connection, registers browser-side RPC methods the agent
 * can invoke (geolocation, image capture, human hand-off, hang-up, important
 * questions, theme color), and renders the video/audio/chat tiles plus a
 * phone simulator.
 *
 * NOTE(review): this copy of the file is truncated — the component continues
 * past the end of the visible chunk (the `instructionsContent` JSX below is
 * left open). Additionally, most JSX element tags inside the `useMemo` render
 * helpers appear to have been stripped from this copy (only text children
 * survive); restore them from version control rather than from this file.
 */
export default function Playground({
  logo,
  themeColors,
  onConnect,
}: PlaygroundProps) {
  const { config, setUserSettings } = useConfig();
  const { name } = useRoomInfo();
  // NOTE(review): `useState([])` infers `never[]` under strict TS; the items
  // pushed below suggest this was meant to be typed (presumably
  // ChatMessageType[] — confirm against TranscriptionTile's expectations).
  const [transcripts, setTranscripts] = useState([]);
  const {
    localParticipant } = useLocalParticipant();
  const { send: sendChat, chatMessages } = useChat();
  const voiceAssistant = useVoiceAssistant();
  const roomState = useConnectionState();
  const tracks = useTracks();
  const room = useRoomContext();
  // UI mode of the phone simulator; driven by agent RPC calls below.
  const [phoneMode, setPhoneMode] = useState<"normal" | "capture" | "important_message" | "hand_off">("normal");
  const [capturePrompt, setCapturePrompt] = useState("");
  const [importantMessage, setImportantMessage] = useState("");
  const [importantMessageOptions, setImportantMessageOptions] = useState([]);
  // Resolvers for in-flight agent RPCs that wait on user interaction.
  // Held in refs so RPC handlers and cleanup paths can resolve them without
  // re-rendering.
  const importantMessageResolverRef = useRef<((value: string) => void) | null>(null);
  const imageCaptureResolverRef = useRef<((value: string) => void) | null>(null);
  const [rpcMethod, setRpcMethod] = useState("");
  const [rpcPayload, setRpcPayload] = useState("");
  const [showRpc, setShowRpc] = useState(false);
  const [qrCodeUrl, setQrCodeUrl] = useState("");

  // Clean up RPC resolvers before disconnecting to prevent errors
  const cleanupRpcResolvers = useCallback(() => {
    // Clean up any pending important message RPC
    if (importantMessageResolverRef.current) {
      const resolver = importantMessageResolverRef.current;
      importantMessageResolverRef.current = null;
      try {
        // Only resolve if room is still connected to avoid RPC errors
        if (roomState === ConnectionState.Connected) {
          resolver("disconnected");
        }
      } catch (error) {
        // Ignore errors during cleanup - room might be disconnecting
      }
    }
    // Clean up any pending image capture RPC
    if (imageCaptureResolverRef.current) {
      const resolver = imageCaptureResolverRef.current;
      imageCaptureResolverRef.current = null;
      try {
        // Only resolve if room is still connected to avoid RPC errors
        if (roomState === ConnectionState.Connected) {
          resolver(JSON.stringify({ error: "disconnected" }));
        }
      } catch (error) {
        // Ignore errors during cleanup - room might be disconnecting
      }
    }
  }, [roomState]);

  // Wrapper for disconnect that cleans up RPC resolvers first
  const handleDisconnect = useCallback(() => {
    cleanupRpcResolvers();
    try {
      onConnect(false);
    } catch (error) {
      // Silently handle any errors during disconnect
      console.warn("Error during disconnect:", error);
    }
  }, [onConnect, cleanupRpcResolvers]);

  // Keep camera/mic publication in sync with the user's settings once connected.
  useEffect(() => {
    if (roomState === ConnectionState.Connected && localParticipant) {
      try {
        localParticipant.setCameraEnabled(config.settings.inputs.camera);
        localParticipant.setMicrophoneEnabled(config.settings.inputs.mic);
      } catch (error) {
        console.error("Failed to set camera/microphone:", error);
        // Retry after a short delay if connection might not be fully ready
        const retryTimeout = setTimeout(() => {
          if (roomState === ConnectionState.Connected && localParticipant) {
            try {
              localParticipant.setCameraEnabled(config.settings.inputs.camera);
              localParticipant.setMicrophoneEnabled(config.settings.inputs.mic);
            } catch (retryError) {
              console.error("Failed to set camera/microphone on retry:", retryError);
            }
          }
        }, 500);
        return () => clearTimeout(retryTimeout);
      }
    }
  }, [config.settings.inputs.camera, config.settings.inputs.mic, localParticipant, roomState]);

  // Register the agent-invocable RPC methods on the local participant.
  // NOTE(review): this effect returns no cleanup, so methods are never
  // unregistered; if the SDK rejects duplicate registration, re-running this
  // effect (deps change / reconnect) may throw — verify against the LiveKit
  // client's registerRpcMethod semantics.
  useEffect(() => {
    if (!localParticipant || roomState !== ConnectionState.Connected) {
      return;
    }
    // getUserLocation: resolve the browser's geolocation and return lat/lng
    // as a JSON string; RpcError(1) on any failure (denied, timeout, …).
    localParticipant.registerRpcMethod(
      'getUserLocation',
      async (data: RpcInvocationData) => {
        try {
          let params = JSON.parse(data.payload);
          const position: GeolocationPosition = await new Promise((resolve, reject) => {
            // Bound the browser lookup by the RPC's own response timeout.
            navigator.geolocation.getCurrentPosition(resolve, reject, {
              enableHighAccuracy: params.highAccuracy ?? false,
              timeout: data.responseTimeout,
            });
          });
          return JSON.stringify({
            latitude: position.coords.latitude,
            longitude: position.coords.longitude,
          });
        } catch (error) {
          throw new RpcError(1, "Could not retrieve user location");
        }
      }
    );
    // askImageCapture: switch the phone simulator into capture mode and keep
    // the RPC pending until the user captures/uploads an image.
    localParticipant.registerRpcMethod(
      'askImageCapture',
      async (data: RpcInvocationData) => {
        // Return a promise that will be resolved when user captures/uploads an image
        return new Promise((resolve) => {
          if (data.payload) {
            try {
              const payload = JSON.parse(data.payload);
              if (payload.prompt) {
                setCapturePrompt(payload.prompt);
              }
            } catch (e) {
              console.error("Failed to parse askImageCapture payload", e);
            }
          }
          setPhoneMode("capture");
          // Store the resolver to be called when user captures/uploads an image
          imageCaptureResolverRef.current = (imageData: string) => {
            resolve(imageData);
          };
        });
      }
    );
    // enterHandOffToHumanMode: flip the simulator into hand-off mode.
    localParticipant.registerRpcMethod(
      'enterHandOffToHumanMode',
      async () => {
        setPhoneMode("hand_off");
        return JSON.stringify({ success: true });
      }
    );
    // hangUpCall: let the agent end the session.
    localParticipant.registerRpcMethod(
      'hangUpCall',
      async () => {
        // Disconnect the call
        handleDisconnect();
        return JSON.stringify({ success: true });
      }
    );
    // askImportantQuestion: show a blocking question with options and keep
    // the RPC pending until the user picks one.
    localParticipant.registerRpcMethod(
      'askImportantQuestion',
      async (data: RpcInvocationData) => {
        // Return a promise that will be resolved when user makes a selection
        return new Promise((resolve) => {
          if (data.payload) {
            try {
              const payload = JSON.parse(data.payload);
              if (payload.message) {
                setImportantMessage(payload.message);
              }
              if (payload.options && Array.isArray(payload.options)) {
                setImportantMessageOptions(payload.options);
              } else {
                // Default fallback if no options provided
                setImportantMessageOptions(["确认"]);
              }
            } catch (e) {
              console.error("Failed to parse askImportantQuestion payload", e);
            }
          }
          setPhoneMode("important_message");
          // Store the resolver to be called when user makes a selection
          importantMessageResolverRef.current = (selection: string) => {
            resolve(JSON.stringify({ selection }));
          };
        });
      }
    );
  }, [localParticipant, roomState,
    handleDisconnect]);

  // Reset the simulator and settle any pending interactive RPCs whenever the
  // connection state flips.
  useEffect(() => {
    if (roomState === ConnectionState.Connected) {
      setPhoneMode("normal");
    } else if (roomState === ConnectionState.Disconnected) {
      setPhoneMode("normal");
      // Clean up any pending important message RPC
      if (importantMessageResolverRef.current) {
        importantMessageResolverRef.current("disconnected");
        importantMessageResolverRef.current = null;
      }
      // Clean up any pending image capture RPC
      if (imageCaptureResolverRef.current) {
        imageCaptureResolverRef.current(JSON.stringify({ error: "disconnected" }));
        imageCaptureResolverRef.current = null;
      }
    }
  }, [roomState]);

  useEffect(() => {
    // When agent starts speaking, exit capture mode (and PhoneSimulator will clear processing image)
    // Only exit if we are NOT waiting for capture (resolver is null), meaning this speech is likely the analysis result
    if (voiceAssistant.state === "speaking" && phoneMode === "capture" && !imageCaptureResolverRef.current) {
      setPhoneMode("normal");
      setCapturePrompt("");
    }
  }, [voiceAssistant.state, phoneMode]);

  useEffect(() => {
    // Also exit capture mode if we receive a completion message in chat (in case agent doesn't speak immediately)
    if (chatMessages.length > 0) {
      const lastMsg = chatMessages[chatMessages.length - 1];
      if (lastMsg.message && lastMsg.message.includes("✅ Result: ask_image_capture") && phoneMode === "capture") {
        setPhoneMode("normal");
        setCapturePrompt("");
      }
    }
  }, [chatMessages, phoneMode]);

  // setThemeColor RPC: lets the agent change the playground's theme color,
  // restricted to the `themeColors` whitelist passed in as a prop.
  // NOTE(review): registered in a separate effect from the other RPCs and,
  // like them, never unregistered on cleanup.
  useEffect(() => {
    if (!localParticipant || roomState !== ConnectionState.Connected) {
      return;
    }
    localParticipant.registerRpcMethod(
      'setThemeColor',
      async (data: RpcInvocationData) => {
        try {
          const params = JSON.parse(data.payload);
          const color = params.color;
          // Validate that the color is in the allowed list
          if (!themeColors.includes(color)) {
            throw new RpcError(2, `Invalid color. Must be one of: ${themeColors.join(', ')}`);
          }
          // Update the theme color
          const userSettings = { ...config.settings };
          userSettings.theme_color = color;
          setUserSettings(userSettings);
          return JSON.stringify({ success: true, color });
        } catch (error) {
          // Re-throw validation errors as-is; wrap anything else as code 1.
          if (error instanceof RpcError) {
            throw error;
          }
          throw new RpcError(1, `Could not set theme color: ${error instanceof Error ? error.message : String(error)}`);
        }
      }
    );
  }, [localParticipant, roomState, config.settings, themeColors, setUserSettings]);

  // Derive the interesting tracks from the room's track list.
  const agentVideoTrack = tracks.find(
    (trackRef) => trackRef.publication.kind === Track.Kind.Video && trackRef.participant.isAgent,
  );
  const localTracks = tracks.filter(
    ({ participant }) => participant instanceof LocalParticipant,
  );
  const localCameraTrack = localTracks.find(
    ({ source }) => source === Track.Source.Camera,
  );
  const localScreenTrack = localTracks.find(
    ({ source }) => source === Track.Source.ScreenShare,
  );
  const localMicTrack = localTracks.find(
    ({ source }) => source === Track.Source.Microphone,
  );

  // Called by the UI when the user answers an "important question"; resets
  // the simulator and resolves the pending askImportantQuestion RPC.
  const handleImportantMessageAction = useCallback(async (text: string) => {
    setPhoneMode("normal");
    setImportantMessage("");
    setImportantMessageOptions([]);
    // Resolve the RPC with the user's selection
    if (importantMessageResolverRef.current) {
      importantMessageResolverRef.current(text || "确认");
      importantMessageResolverRef.current = null;
    }
  }, []);

  // Data-channel handler: appends incoming "transcription" messages to the
  // local transcript list, preferring the sender's timestamp when present.
  const onDataReceived = useCallback(
    (msg: any) => {
      if (msg.topic === "transcription") {
        const decoded = JSON.parse(
          new TextDecoder("utf-8").decode(msg.payload),
        );
        let timestamp = new Date().getTime();
        if ("timestamp" in decoded && decoded.timestamp > 0) {
          timestamp = decoded.timestamp;
        }
        setTranscripts([
          ...transcripts,
          {
            name: "You",
            message: decoded.text,
            timestamp: timestamp,
            isSelf: true,
          },
        ]);
      }
    },
    [transcripts],
  );
  useDataChannel(onDataReceived);

  // Video tile: disconnected / waiting / live-video states.
  // NOTE(review): the JSX element tags in this memo have been stripped from
  // this copy of the file — only the text children remain below, and
  // `videoFitClassName` is referenced only by the missing markup. Restore
  // the original elements from version control; do not compile this as-is.
  const videoTileContent = useMemo(() => {
    const videoFitClassName = `object-${config.video_fit || "contain"}`;
    const disconnectedContent = (
No agent video track. Connect to get started.
    );
    const loadingContent = (
Waiting for agent video track…
    );
    const videoContent = ( );
    let content = null;
    if (roomState === ConnectionState.Disconnected) {
      content = disconnectedContent;
    } else if (agentVideoTrack) {
      content = videoContent;
    } else {
      content = loadingContent;
    }
    return (
{content}
    );
  }, [agentVideoTrack, config, roomState]);

  // Push the selected theme color into CSS custom properties consumed by the
  // LiveKit components' styling.
  useEffect(() => {
    if (typeof document !== "undefined") {
      document.body.style.setProperty(
        "--lk-theme-color",
        // @ts-ignore
        tailwindTheme.colors[config.settings.theme_color]["500"],
      );
      document.body.style.setProperty(
        "--lk-drop-shadow",
        `var(--lk-theme-color) 0px 0px 18px`,
      );
    }
  }, [config.settings.theme_color]);

  // Capture the current page URL (client-side only) for the QR code display.
  useEffect(() => {
    if (typeof window !== "undefined") {
      setQrCodeUrl(window.location.href);
    }
  }, []);

  // Audio tile: disconnected / waiting / visualizer states.
  // NOTE(review): JSX markup stripped from this copy as well — see the note
  // on videoTileContent above.
  const audioTileContent = useMemo(() => {
    const disconnectedContent = (
No agent audio track. Connect to get started.
    );
    const waitingContent = (
Waiting for agent audio track…
    );
    const visualizerContent = (
    );
    if (roomState === ConnectionState.Disconnected) {
      return disconnectedContent;
    }
    if (!voiceAssistant.audioTrack) {
      return waitingContent;
    }
    return visualizerContent;
  }, [
    voiceAssistant.audioTrack,
    config.settings.theme_color,
    roomState,
    voiceAssistant.state,
  ]);

  // Chat tile: rendered only once the agent participant exists.
  // NOTE(review): JSX markup stripped from this copy (`return ( );` and the
  // unclosed `<>` are remnants) — restore from version control.
  const chatTileContent = useMemo(() => {
    if (voiceAssistant.agent) {
      return ( );
    }
    return <>;
  }, [
    config.settings.theme_color,
    voiceAssistant.audioTrack,
    voiceAssistant.agent,
    phoneMode,
  ]);

  // NOTE(review): SOURCE chunk ends here mid-expression; the rest of the
  // component (instructionsContent body, settings tiles, layout, return
  // statement, closing brace) lies beyond the visible file chunk.
  const instructionsContent = (
    <>