"use client";
import { LoadingSVG } from "@/components/button/LoadingSVG";
import { ChatMessageType } from "@/components/chat/ChatTile";
import { ColorPicker } from "@/components/colorPicker/ColorPicker";
import { AudioInputTile } from "@/components/config/AudioInputTile";
import { ConfigurationPanelItem } from "@/components/config/ConfigurationPanelItem";
import { EditableNameValueRow, NameValueRow } from "@/components/config/NameValueRow";
import { PhoneSimulator } from "@/components/playground/PhoneSimulator";
import { PlaygroundHeader } from "@/components/playground/PlaygroundHeader";
import {
PlaygroundTab,
PlaygroundTabbedTile,
PlaygroundTile,
} from "@/components/playground/PlaygroundTile";
import { useConfig } from "@/hooks/useConfig";
import { TranscriptionTile } from "@/transcriptions/TranscriptionTile";
import {
BarVisualizer,
VideoTrack,
useConnectionState,
useDataChannel,
useLocalParticipant,
useRoomInfo,
useTracks,
useVoiceAssistant,
useRoomContext,
useParticipantAttributes,
useChat,
} from "@livekit/components-react";
import { ConnectionState, LocalParticipant, Track, RpcError, RpcInvocationData } from "livekit-client";
import { QRCodeSVG } from "qrcode.react";
import { ReactNode, useCallback, useEffect, useMemo, useRef, useState } from "react";
import tailwindTheme from "../../lib/tailwindTheme.preval";
import { AttributesInspector } from "@/components/config/AttributesInspector";
import { RpcPanel } from "./RpcPanel";
export interface PlaygroundMeta {
name: string;
value: string;
}
export interface PlaygroundProps {
logo?: ReactNode;
themeColors: string[];
onConnect: (connect: boolean, opts?: { token: string; url: string }) => void;
}
const headerHeight = 56;
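// Top-level playground view: renders the simulated phone, chat/transcription
// and settings tiles, and wires agent-invoked RPC methods to the phone UI.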
export default function Playground({
logo,
themeColors,
onConnect,
}: PlaygroundProps) {
const { config, setUserSettings } = useConfig();
const { name } = useRoomInfo();
const [transcripts, setTranscripts] = useState<ChatMessageType[]>([]);
const { localParticipant } = useLocalParticipant();
const { send: sendChat, chatMessages } = useChat();
const voiceAssistant = useVoiceAssistant();
const roomState = useConnectionState();
const tracks = useTracks();
const room = useRoomContext();
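  // UI mode of the simulated phone: "capture" prompts the user for an image,
  // "important_message" shows a blocking confirmation with options, and
  // "hand_off" indicates the call has been handed off to a human.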
const [phoneMode, setPhoneMode] = useState<"normal" | "capture" | "important_message" | "hand_off">("normal");
const [capturePrompt, setCapturePrompt] = useState<string>("");
const [importantMessage, setImportantMessage] = useState<string>("");
const [importantMessageOptions, setImportantMessageOptions] = useState<string[]>([]);
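  // Resolvers for in-flight agent→client RPCs, kept in refs so the pending
  // promise can be settled later from UI callbacks.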
const importantMessageResolverRef = useRef<((value: string) => void) | null>(null);
const imageCaptureResolverRef = useRef<((value: string) => void) | null>(null);
const [rpcMethod, setRpcMethod] = useState("");
const [rpcPayload, setRpcPayload] = useState("");
const [showRpc, setShowRpc] = useState(false);
const [qrCodeUrl, setQrCodeUrl] = useState<string>("");
// Clean up RPC resolvers before disconnecting to prevent errors
const cleanupRpcResolvers = useCallback(() => {
// Clean up any pending important message RPC
if (importantMessageResolverRef.current) {
const resolver = importantMessageResolverRef.current;
importantMessageResolverRef.current = null;
try {
// Only resolve if room is still connected to avoid RPC errors
if (roomState === ConnectionState.Connected) {
resolver("disconnected");
}
} catch (error) {
// Ignore errors during cleanup - room might be disconnecting
}
}
// Clean up any pending image capture RPC
if (imageCaptureResolverRef.current) {
const resolver = imageCaptureResolverRef.current;
imageCaptureResolverRef.current = null;
try {
// Only resolve if room is still connected to avoid RPC errors
if (roomState === ConnectionState.Connected) {
resolver(JSON.stringify({ error: "disconnected" }));
}
} catch (error) {
// Ignore errors during cleanup - room might be disconnecting
}
}
}, [roomState]);
// Wrapper for disconnect that cleans up RPC resolvers first
const handleDisconnect = useCallback(() => {
cleanupRpcResolvers();
try {
onConnect(false);
} catch (error) {
// Silently handle any errors during disconnect
console.warn("Error during disconnect:", error);
}
}, [onConnect, cleanupRpcResolvers]);
useEffect(() => {
if (roomState === ConnectionState.Connected && localParticipant) {
try {
localParticipant.setCameraEnabled(config.settings.inputs.camera);
localParticipant.setMicrophoneEnabled(config.settings.inputs.mic);
} catch (error) {
console.error("Failed to set camera/microphone:", error);
// Retry after a short delay if connection might not be fully ready
const retryTimeout = setTimeout(() => {
if (roomState === ConnectionState.Connected && localParticipant) {
try {
localParticipant.setCameraEnabled(config.settings.inputs.camera);
localParticipant.setMicrophoneEnabled(config.settings.inputs.mic);
} catch (retryError) {
console.error("Failed to set camera/microphone on retry:", retryError);
}
}
}, 500);
return () => clearTimeout(retryTimeout);
}
}
}, [config.settings.inputs.camera, config.settings.inputs.mic, localParticipant, roomState]);
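  // Register the client-side RPC methods that the agent can invoke on this
  // participant while the room is connected.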
useEffect(() => {
if (!localParticipant || roomState !== ConnectionState.Connected) {
return;
}
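    // getUserLocation: resolve with the browser's current coordinates,
    // honoring the optional `highAccuracy` flag in the payload.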
localParticipant.registerRpcMethod(
'getUserLocation',
async (data: RpcInvocationData) => {
try {
          const params = JSON.parse(data.payload);
const position: GeolocationPosition = await new Promise((resolve, reject) => {
navigator.geolocation.getCurrentPosition(resolve, reject, {
enableHighAccuracy: params.highAccuracy ?? false,
timeout: data.responseTimeout,
});
});
return JSON.stringify({
latitude: position.coords.latitude,
longitude: position.coords.longitude,
});
} catch (error) {
throw new RpcError(1, "Could not retrieve user location");
}
}
);
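    // askImageCapture: switch the phone UI into capture mode and resolve once
    // the user has captured or uploaded an image.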
localParticipant.registerRpcMethod(
'askImageCapture',
async (data: RpcInvocationData) => {
// Return a promise that will be resolved when user captures/uploads an image
return new Promise<string>((resolve) => {
if (data.payload) {
try {
const payload = JSON.parse(data.payload);
if (payload.prompt) {
setCapturePrompt(payload.prompt);
}
} catch (e) {
console.error("Failed to parse askImageCapture payload", e);
}
}
setPhoneMode("capture");
// Store the resolver to be called when user captures/uploads an image
imageCaptureResolverRef.current = (imageData: string) => {
resolve(imageData);
};
});
}
);
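    // enterHandOffToHumanMode: switch the phone UI to the hand-off screen.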
localParticipant.registerRpcMethod(
'enterHandOffToHumanMode',
async () => {
setPhoneMode("hand_off");
return JSON.stringify({ success: true });
}
);
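    // hangUpCall: let the agent end the call by disconnecting this client.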
localParticipant.registerRpcMethod(
'hangUpCall',
async () => {
// Disconnect the call
handleDisconnect();
return JSON.stringify({ success: true });
}
);
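    // askImportantQuestion: show a blocking message with selectable options and
    // resolve with the user's selection. Expected payload shape (as parsed
    // below): { message: string, options: string[] }.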
localParticipant.registerRpcMethod(
'askImportantQuestion',
async (data: RpcInvocationData) => {
// Return a promise that will be resolved when user makes a selection
return new Promise<string>((resolve) => {
if (data.payload) {
try {
const payload = JSON.parse(data.payload);
if (payload.message) {
setImportantMessage(payload.message);
}
if (payload.options && Array.isArray(payload.options)) {
setImportantMessageOptions(payload.options);
} else {
                // Default fallback ("确认" = "Confirm") if no options are provided
                setImportantMessageOptions(["确认"]);
}
} catch (e) {
console.error("Failed to parse askImportantQuestion payload", e);
}
}
setPhoneMode("important_message");
// Store the resolver to be called when user makes a selection
importantMessageResolverRef.current = (selection: string) => {
resolve(JSON.stringify({ selection }));
};
});
}
);
    // Unregister the handlers when the effect re-runs or the component
    // unmounts so stale registrations don't accumulate.
    return () => {
      localParticipant.unregisterRpcMethod('getUserLocation');
      localParticipant.unregisterRpcMethod('askImageCapture');
      localParticipant.unregisterRpcMethod('enterHandOffToHumanMode');
      localParticipant.unregisterRpcMethod('hangUpCall');
      localParticipant.unregisterRpcMethod('askImportantQuestion');
    };
  }, [localParticipant, roomState, handleDisconnect]);
useEffect(() => {
if (roomState === ConnectionState.Connected) {
setPhoneMode("normal");
} else if (roomState === ConnectionState.Disconnected) {
setPhoneMode("normal");
// Clean up any pending important message RPC
if (importantMessageResolverRef.current) {
importantMessageResolverRef.current("disconnected");
importantMessageResolverRef.current = null;
}
// Clean up any pending image capture RPC
if (imageCaptureResolverRef.current) {
imageCaptureResolverRef.current(JSON.stringify({ error: "disconnected" }));
imageCaptureResolverRef.current = null;
}
}
}, [roomState]);
useEffect(() => {
// When agent starts speaking, exit capture mode (and PhoneSimulator will clear processing image)
// Only exit if we are NOT waiting for capture (resolver is null), meaning this speech is likely the analysis result
if (voiceAssistant.state === "speaking" && phoneMode === "capture" && !imageCaptureResolverRef.current) {
setPhoneMode("normal");
setCapturePrompt("");
}
}, [voiceAssistant.state, phoneMode]);
useEffect(() => {
// Also exit capture mode if we receive a completion message in chat (in case agent doesn't speak immediately)
if (chatMessages.length > 0) {
const lastMsg = chatMessages[chatMessages.length - 1];
if (lastMsg.message && lastMsg.message.includes("✅ Result: ask_image_capture") && phoneMode === "capture") {
setPhoneMode("normal");
setCapturePrompt("");
}
}
}, [chatMessages, phoneMode]);
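  // setThemeColor RPC: allows the agent to change the playground theme color.
  // Registered in its own effect because it depends on the current settings.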
useEffect(() => {
if (!localParticipant || roomState !== ConnectionState.Connected) {
return;
}
localParticipant.registerRpcMethod(
'setThemeColor',
async (data: RpcInvocationData) => {
try {
const params = JSON.parse(data.payload);
const color = params.color;
          // Validate that the requested color is one of the allowed theme colors
if (!themeColors.includes(color)) {
throw new RpcError(2, `Invalid color. Must be one of: ${themeColors.join(', ')}`);
}
          // Update the theme color in the user settings
const userSettings = { ...config.settings };
userSettings.theme_color = color;
setUserSettings(userSettings);
return JSON.stringify({ success: true, color });
} catch (error) {
if (error instanceof RpcError) {
throw error;
}
throw new RpcError(1, `Could not set theme color: ${error instanceof Error ? error.message : String(error)}`);
}
}
);
    // Unregister on cleanup so the handler is re-registered with fresh
    // settings whenever the dependencies change.
    return () => {
      localParticipant.unregisterRpcMethod('setThemeColor');
    };
  }, [localParticipant, roomState, config.settings, themeColors, setUserSettings]);
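  // Derive the agent's video track and the local camera / screen-share /
  // microphone tracks from the room's track list.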
const agentVideoTrack = tracks.find(
(trackRef) =>
trackRef.publication.kind === Track.Kind.Video &&
trackRef.participant.isAgent,
);
const localTracks = tracks.filter(
({ participant }) => participant instanceof LocalParticipant,
);
const localCameraTrack = localTracks.find(
({ source }) => source === Track.Source.Camera,
);
const localScreenTrack = localTracks.find(
({ source }) => source === Track.Source.ScreenShare,
);
const localMicTrack = localTracks.find(
({ source }) => source === Track.Source.Microphone,
);
const handleImportantMessageAction = useCallback(async (text: string) => {
setPhoneMode("normal");
setImportantMessage("");
setImportantMessageOptions([]);
// Resolve the RPC with the user's selection
if (importantMessageResolverRef.current) {
      importantMessageResolverRef.current(text || "确认"); // falls back to "确认" ("Confirm")
importantMessageResolverRef.current = null;
}
}, []);
  const onDataReceived = useCallback(
    (msg: any) => {
      if (msg.topic === "transcription") {
        const decoded = JSON.parse(
          new TextDecoder("utf-8").decode(msg.payload),
        );
        let timestamp = new Date().getTime();
        if ("timestamp" in decoded && decoded.timestamp > 0) {
          timestamp = decoded.timestamp;
        }
        // Use a functional update so rapid messages aren't lost to a stale
        // `transcripts` closure and the callback stays referentially stable.
        setTranscripts((prev) => [
          ...prev,
          {
            name: "You",
            message: decoded.text,
            timestamp: timestamp,
            isSelf: true,
          },
        ]);
      }
    },
    [],
  );
useDataChannel(onDataReceived);
const videoTileContent = useMemo(() => {
const videoFitClassName = `object-${config.video_fit || "contain"}`;
const disconnectedContent = (
<div className="flex items-center justify-center text-gray-700 text-center w-full h-full">
No agent video track. Connect to get started.
</div>
);
const loadingContent = (
<div className="flex flex-col items-center justify-center gap-2 text-gray-700 text-center h-full w-full">
<LoadingSVG />
Waiting for agent video track
</div>
);
const videoContent = (
<VideoTrack
trackRef={agentVideoTrack}
className={`absolute top-1/2 -translate-y-1/2 ${videoFitClassName} object-position-center w-full h-full`}
/>
);
let content = null;
if (roomState === ConnectionState.Disconnected) {
content = disconnectedContent;
} else if (agentVideoTrack) {
content = videoContent;
} else {
content = loadingContent;
}
return (
<div className="flex flex-col w-full grow text-gray-950 bg-black rounded-sm border border-gray-800 relative">
{content}
</div>
);
}, [agentVideoTrack, config, roomState]);
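  // Expose the selected theme color to LiveKit components via CSS variables.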
useEffect(() => {
if (typeof document !== "undefined") {
document.body.style.setProperty(
"--lk-theme-color",
// @ts-ignore
tailwindTheme.colors[config.settings.theme_color]["500"],
);
document.body.style.setProperty(
"--lk-drop-shadow",
`var(--lk-theme-color) 0px 0px 18px`,
);
}
}, [config.settings.theme_color]);
useEffect(() => {
if (typeof window !== "undefined") {
setQrCodeUrl(window.location.href);
}
}, []);
const audioTileContent = useMemo(() => {
const disconnectedContent = (
<div className="flex flex-col items-center justify-center gap-2 text-gray-700 text-center w-full">
No agent audio track. Connect to get started.
</div>
);
const waitingContent = (
<div className="flex flex-col items-center gap-2 text-gray-700 text-center w-full">
<LoadingSVG />
Waiting for agent audio track
</div>
);
const visualizerContent = (
<div
className={`flex items-center justify-center w-full h-48 [--lk-va-bar-width:30px] [--lk-va-bar-gap:20px] [--lk-fg:var(--lk-theme-color)]`}
>
<BarVisualizer
state={voiceAssistant.state}
trackRef={voiceAssistant.audioTrack}
barCount={5}
options={{ minHeight: 20 }}
/>
</div>
);
if (roomState === ConnectionState.Disconnected) {
return disconnectedContent;
}
if (!voiceAssistant.audioTrack) {
return waitingContent;
}
return visualizerContent;
}, [
voiceAssistant.audioTrack,
config.settings.theme_color,
roomState,
voiceAssistant.state,
]);
const chatTileContent = useMemo(() => {
if (voiceAssistant.agent) {
return (
<TranscriptionTile
agentAudioTrack={voiceAssistant.audioTrack}
accentColor={config.settings.theme_color}
inputDisabled={phoneMode === "important_message" || phoneMode === "hand_off"}
/>
);
}
return <></>;
}, [
config.settings.theme_color,
voiceAssistant.audioTrack,
voiceAssistant.agent,
phoneMode,
]);
const instructionsContent = (
<>
<ConfigurationPanelItem title="Instructions">
<textarea
className="w-full bg-gray-950 text-white text-sm p-3 rounded-md border border-gray-800 focus:border-gray-600 focus:outline-none transition-colors resize-none disabled:opacity-50 disabled:cursor-not-allowed"
style={{ minHeight: "80px" }}
rows={3}
placeholder="Enter system instructions for the agent..."
value={config.settings.instructions}
onChange={(e) => {
const newSettings = { ...config.settings };
newSettings.instructions = e.target.value;
setUserSettings(newSettings);
}}
disabled={roomState !== ConnectionState.Disconnected}
/>
</ConfigurationPanelItem>
<ConfigurationPanelItem title="Color">
<ColorPicker
colors={themeColors}
selectedColor={config.settings.theme_color}
onSelect={(color) => {
const userSettings = { ...config.settings };
userSettings.theme_color = color;
setUserSettings(userSettings);
}}
/>
</ConfigurationPanelItem>
</>
);
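  // Perform a manual RPC call against the agent using the method and payload
  // entered in the RpcPanel.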
const handleRpcCall = useCallback(async () => {
if (!voiceAssistant.agent || !room) {
throw new Error("No agent or room available");
}
const response = await room.localParticipant.performRpc({
destinationIdentity: voiceAssistant.agent.identity,
method: rpcMethod,
payload: rpcPayload,
});
return response;
}, [room, rpcMethod, rpcPayload, voiceAssistant.agent]);
const agentAttributes = useParticipantAttributes({
participant: voiceAssistant.agent,
});
const settingsTileContent = useMemo(() => {
return (
<div className="flex flex-col h-full w-full items-start overflow-y-auto">
{/* {config.description && (
<ConfigurationPanelItem title="Description">
{config.description}
</ConfigurationPanelItem>
)} */}
{/* <ConfigurationPanelItem title="Room">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Room name"
value={
roomState === ConnectionState.Connected
? name
: config.settings.room_name
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.room_name = value;
setUserSettings(newSettings);
}}
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<NameValueRow
name="Status"
value={
roomState === ConnectionState.Connecting ? (
<LoadingSVG diameter={16} strokeWidth={2} />
) : (
roomState.charAt(0).toUpperCase() + roomState.slice(1)
)
}
valueColor={
roomState === ConnectionState.Connected
? `${config.settings.theme_color}-500`
: "gray-500"
}
/>
</div>
</ConfigurationPanelItem> */}
{/* <ConfigurationPanelItem title="Agent">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Agent name"
value={
roomState === ConnectionState.Connected
? config.settings.agent_name || "None"
: config.settings.agent_name || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.agent_name = value;
setUserSettings(newSettings);
}}
placeholder="None"
editable={roomState !== ConnectionState.Connected}
/>
<NameValueRow
name="Identity"
value={
voiceAssistant.agent ? (
voiceAssistant.agent.identity
) : roomState === ConnectionState.Connected ? (
<LoadingSVG diameter={12} strokeWidth={2} />
) : (
"No agent connected"
)
}
valueColor={
voiceAssistant.agent
? `${config.settings.theme_color}-500`
: "gray-500"
}
/>
{roomState === ConnectionState.Connected &&
voiceAssistant.agent && (
<AttributesInspector
attributes={Object.entries(
agentAttributes.attributes || {},
).map(([key, value], index) => ({
id: `agent-attr-${index}`,
key,
value: String(value),
}))}
onAttributesChange={() => {}}
themeColor={config.settings.theme_color}
disabled={true}
/>
)}
<p className="text-xs text-gray-500 text-right">
Set an agent name to use{" "}
<a
href="https://docs.livekit.io/agents/worker/dispatch#explicit"
target="_blank"
rel="noopener noreferrer"
className="text-gray-500 hover:text-gray-300 underline"
>
explicit dispatch
</a>
.
</p>
</div>
</ConfigurationPanelItem> */}
{/* <ConfigurationPanelItem title="User">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Name"
value={
roomState === ConnectionState.Connected
? localParticipant?.name || ""
: config.settings.participant_name || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.participant_name = value;
setUserSettings(newSettings);
}}
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<EditableNameValueRow
name="Identity"
value={
roomState === ConnectionState.Connected
? localParticipant?.identity || ""
: config.settings.participant_id || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.participant_id = value;
setUserSettings(newSettings);
}}
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<AttributesInspector
attributes={config.settings.attributes || []}
onAttributesChange={(newAttributes) => {
const newSettings = { ...config.settings };
newSettings.attributes = newAttributes;
setUserSettings(newSettings);
}}
metadata={config.settings.metadata}
onMetadataChange={(metadata) => {
const newSettings = { ...config.settings };
newSettings.metadata = metadata;
setUserSettings(newSettings);
}}
themeColor={config.settings.theme_color}
disabled={false}
connectionState={roomState}
/>
</div>
</ConfigurationPanelItem> */}
{roomState === ConnectionState.Connected &&
config.settings.inputs.screen && (
<ConfigurationPanelItem
title="Screen"
source={Track.Source.ScreenShare}
>
{localScreenTrack ? (
<div className="relative">
<VideoTrack
className="rounded-sm border border-gray-800 opacity-70 w-full"
trackRef={localScreenTrack}
/>
</div>
) : (
<div className="flex items-center justify-center text-gray-700 text-center w-full h-full">
Press the button above to share your screen.
</div>
)}
</ConfigurationPanelItem>
)}
{roomState === ConnectionState.Connected && voiceAssistant.agent && (
<RpcPanel
config={config}
rpcMethod={rpcMethod}
rpcPayload={rpcPayload}
setRpcMethod={setRpcMethod}
setRpcPayload={setRpcPayload}
handleRpcCall={handleRpcCall}
/>
)}
{localCameraTrack && (
<ConfigurationPanelItem title="Camera" source={Track.Source.Camera}>
<div className="relative">
<VideoTrack
className="rounded-sm border border-gray-800 opacity-70 w-full"
trackRef={localCameraTrack}
/>
</div>
</ConfigurationPanelItem>
)}
{localMicTrack && (
<ConfigurationPanelItem
title="Microphone"
source={Track.Source.Microphone}
>
<AudioInputTile trackRef={localMicTrack} />
</ConfigurationPanelItem>
)}
{config.show_qr && qrCodeUrl && (
<div className="w-full">
<ConfigurationPanelItem title="QR Code">
<QRCodeSVG value={qrCodeUrl} width="128" />
</ConfigurationPanelItem>
</div>
)}
</div>
);
}, [
config.settings,
config.show_qr,
localParticipant,
name,
roomState,
localCameraTrack,
localScreenTrack,
localMicTrack,
themeColors,
setUserSettings,
voiceAssistant.agent,
rpcMethod,
rpcPayload,
handleRpcCall,
showRpc,
setShowRpc,
]);
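  // Tabs for the mobile (single-column) layout; which tabs appear depends on
  // the configured outputs and the chat setting.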
let mobileTabs: PlaygroundTab[] = [];
if (config.settings.outputs.video || config.settings.outputs.audio) {
mobileTabs.push({
title: "Phone",
content: (
<PlaygroundTile
className="w-full h-full grow"
childrenClassName="justify-center"
>
<PhoneSimulator
onConnect={() => onConnect(true)}
onDisconnect={handleDisconnect}
phoneMode={phoneMode}
capturePrompt={capturePrompt}
importantMessage={importantMessage}
importantMessageOptions={importantMessageOptions}
onImportantMessageAction={handleImportantMessageAction}
onCapture={async (content: File) => {
if (localParticipant) {
// Send file via LiveKit byte stream
await localParticipant.sendFile(content, { topic: "image" });
// Resolve RPC to signal completion (without image data)
if (imageCaptureResolverRef.current) {
// Do NOT exit capture mode immediately - wait for agent response (voiceAssistant.state === "speaking")
// setPhoneMode("normal");
// setCapturePrompt("");
imageCaptureResolverRef.current(JSON.stringify({ success: true }));
imageCaptureResolverRef.current = null;
}
}
}}
/>
</PlaygroundTile>
),
});
}
if (config.settings.chat) {
mobileTabs.push({
title: "Chat",
content: chatTileContent,
});
mobileTabs.push({
title: "Instructions",
content: (
<PlaygroundTile
padding={false}
backgroundColor="gray-950"
className="h-full w-full grow items-start overflow-y-auto"
childrenClassName="h-full grow items-start"
>
{instructionsContent}
</PlaygroundTile>
),
});
}
mobileTabs.push({
title: "Settings",
content: (
<PlaygroundTile
padding={false}
backgroundColor="gray-950"
className="h-full w-full basis-1/4 items-start overflow-y-auto flex"
childrenClassName="h-full grow items-start"
>
{settingsTileContent}
</PlaygroundTile>
),
});
return (
<>
{/* <PlaygroundHeader
title={config.title}
logo={logo}
githubLink={config.github_link}
height={headerHeight}
accentColor={config.settings.theme_color}
connectionState={roomState}
onConnectClicked={() => {
if (roomState === ConnectionState.Disconnected) {
onConnect(true);
} else {
handleDisconnect();
}
}}
/> */}
<div
className={`flex gap-4 py-4 grow w-full selection:bg-${config.settings.theme_color}-900`}
style={{ height: `100%` }}
>
<div className="flex flex-col grow basis-1/2 gap-4 h-full lg:hidden">
<PlaygroundTabbedTile
className="h-full"
tabs={mobileTabs}
initialTab={mobileTabs.length - 1}
/>
</div>
<div
className={`flex-col grow basis-1/2 gap-4 h-full hidden lg:${
!config.settings.outputs.audio && !config.settings.outputs.video
? "hidden"
: "flex"
}`}
>
<PlaygroundTile
title="Phone"
className="w-full h-full grow"
childrenClassName="justify-center"
>
<PhoneSimulator
onConnect={() => onConnect(true)}
onDisconnect={handleDisconnect}
phoneMode={phoneMode}
capturePrompt={capturePrompt}
importantMessage={importantMessage}
importantMessageOptions={importantMessageOptions}
onImportantMessageAction={handleImportantMessageAction}
onCapture={async (content: File) => {
if (localParticipant) {
// Send file via LiveKit byte stream
await localParticipant.sendFile(content, { topic: "image" });
// Resolve RPC to signal completion (without image data)
if (imageCaptureResolverRef.current) {
// Do NOT exit capture mode immediately - wait for agent response (voiceAssistant.state === "speaking")
// setPhoneMode("normal");
// setCapturePrompt("");
imageCaptureResolverRef.current(JSON.stringify({ success: true }));
imageCaptureResolverRef.current = null;
}
}
}}
/>
</PlaygroundTile>
</div>
{config.settings.chat && (
<div className="flex flex-col h-full grow basis-1/4 hidden lg:flex gap-4">
<PlaygroundTile
padding={false}
backgroundColor="gray-950"
className="h-auto w-full flex-none min-h-0"
childrenClassName="items-start"
>
{instructionsContent}
</PlaygroundTile>
<PlaygroundTile
title="Chat"
className="w-full grow min-h-0"
>
{chatTileContent}
</PlaygroundTile>
</div>
)}
{/* <PlaygroundTile
padding={false}
backgroundColor="gray-950"
className="h-full w-full basis-1/4 items-start overflow-y-auto hidden max-w-[480px] lg:flex"
childrenClassName="h-full grow items-start"
>
{settingsTileContent}
</PlaygroundTile> */}
</div>
</>
);
}