Compare commits

...

10 Commits

Author SHA1 Message Date
1548fd554a commit backend agent 2026-01-15 14:42:52 +08:00
Ben Cherry
0218a5a002 Add attributes inspector and explicit dispatch support (#144) 2025-05-29 15:26:04 -07:00
Théo Monnom
5eddfa935c use video_fit contain by default (#89)
Co-authored-by: Ben Cherry <ben@livekit.io>
2025-05-28 16:05:39 -07:00
Mahmoud Hemaid
9e2b7fcc61 feat: Add support for participant name, metadata, and attributes (#142) 2025-05-28 16:05:22 -07:00
renovate[bot]
575da78aa1 Update LiveKit dependencies (non-major) (#135)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-08 22:20:20 -07:00
Ben Cherry
c05ea63dae Add screenshare support (#138) 2025-05-07 09:05:41 -07:00
Ben Cherry
238857f368 Update agents playground default description (#137) 2025-04-23 09:41:24 -07:00
Ben Cherry
4e478cb740 Bump components to 2.9.3 for avatar support (#136) 2025-04-23 00:24:24 -07:00
renovate[bot]
da230e601e Update devDependencies (non-major) (#117)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-04 22:53:00 -07:00
Ben Cherry
5416e0fb9f Support text-only agent (#131) 2025-03-26 15:17:57 -07:00
33 changed files with 1760 additions and 732 deletions

View File

@@ -7,10 +7,10 @@ NEXT_PUBLIC_LIVEKIT_URL=wss://YOUR_LIVEKIT_URL
# Application Configuration
NEXT_PUBLIC_APP_CONFIG="
title: 'LiveKit Agent Playground'
description: 'LiveKit Agent Playground allows you to test your LiveKit Agent integration by connecting to your LiveKit Cloud or self-hosted instance.'
title: 'LiveKit Agents Playground'
description: 'A virtual workbench for your multimodal AI agents.'
github_link: 'https://github.com/livekit/agents-playground'
video_fit: 'cover' # 'contain' or 'cover'
video_fit: 'contain' # 'contain' or 'cover'
settings:
editable: true # Should the user be able to edit settings in-app
theme_color: 'cyan'

View File

@@ -1,5 +1,5 @@
name: "\U0001F41E Bug report"
description: Report an issue with LiveKit Agent Playground
description: Report an issue with LiveKit Agents Playground
body:
- type: markdown
attributes:

View File

@@ -11,8 +11,10 @@
# LiveKit Agents Playground
<!--BEGIN_DESCRIPTION-->
The Agent Playground is designed for quickly prototyping with server side agents built with [LiveKit Agents Framework](https://github.com/livekit/agents). Easily tap into LiveKit WebRTC sessions and process or generate audio, video, and data streams.
The playground includes components to fully interact with any LiveKit agent, through video, audio and chat.
The Agents Playground is designed for quickly prototyping with server side agents built with [LiveKit Agents Framework](https://github.com/livekit/agents). Easily tap into LiveKit WebRTC sessions and process or generate audio, video, and data streams.
The playground includes components to fully interact with any LiveKit agent, through video, audio and chat.
<!--END_DESCRIPTION-->
## Docs and references
@@ -69,7 +71,9 @@ NEXT_PUBLIC_LIVEKIT_URL=wss://<Your Cloud URL>
- Mobile device sizes not supported currently
<!--BEGIN_REPO_NAV-->
<br/><table>
<thead><tr><th colspan="2">LiveKit Ecosystem</th></tr></thead>
<tbody>
<tr><td>LiveKit SDKs</td><td><a href="https://github.com/livekit/client-sdk-js">Browser</a> · <a href="https://github.com/livekit/client-sdk-swift">iOS/macOS/visionOS</a> · <a href="https://github.com/livekit/client-sdk-android">Android</a> · <a href="https://github.com/livekit/client-sdk-flutter">Flutter</a> · <a href="https://github.com/livekit/client-sdk-react-native">React Native</a> · <a href="https://github.com/livekit/rust-sdks">Rust</a> · <a href="https://github.com/livekit/node-sdks">Node.js</a> · <a href="https://github.com/livekit/python-sdks">Python</a> · <a href="https://github.com/livekit/client-sdk-unity">Unity</a> · <a href="https://github.com/livekit/client-sdk-unity-web">Unity (WebGL)</a></td></tr><tr></tr>

22
agents/.env.example Normal file
View File

@@ -0,0 +1,22 @@
LIVEKIT_API_SECRET="secret"
LIVEKIT_API_KEY="devkey"
LIVEKIT_URL="ws://127.0.0.1:7880"
# SECURITY: this example file previously contained real, live API keys and
# access tokens. They have been redacted here — every one of the original
# credentials must be considered compromised and revoked/rotated immediately.
# Never commit live credentials, even to an example env file.
MINIMAX_API_KEY="<your-minimax-api-key>"
DEEPSEEK_API_KEY="<your-deepseek-api-key>"
AZURE_SPEECH_KEY="<your-azure-speech-key>"
AZURE_SPEECH_REGION="eastasia"
CARTESIA_API_KEY="<your-cartesia-api-key>"
CARTESIA_LANGUAGE="zh"
SILICONFLOW_API_KEY="<your-siliconflow-api-key>"
DASHSCOPE_API_KEY="<your-dashscope-api-key>"
VOLCENGINE_TTS_ACCESS_TOKEN="<your-volcengine-tts-token>"
VOLCENGINE_STT_ACCESS_TOKEN="<your-volcengine-stt-token>"
VOLCENGINE_LLM_API_KEY="<your-volcengine-llm-api-key>"
VOLCENGINE_REALTIME_ACCESS_TOKEN="<your-volcengine-realtime-token>"

245
agents/my_basic_agent_debate.py Executable file
View File

@@ -0,0 +1,245 @@
import argparse
import asyncio
import base64
import json
import logging
import os
import sys
from dataclasses import asdict, dataclass
from functools import partial

import aiohttp
import httpx
from dotenv import load_dotenv
from livekit import api, rtc
from livekit.agents import (
    Agent,
    AgentSession,
    AudioConfig,
    BackgroundAudioPlayer,
    BuiltinAudioClip,
    JobContext,
    JobProcess,
    MetricsCollectedEvent,
    RoomInputOptions,
    RoomOutputOptions,
    RunContext,
    WorkerOptions,
    cli,
    get_job_context,
    metrics,
)
from livekit.agents.llm import ImageContent, ToolError, function_tool
from livekit.agents.voice.avatar import DataStreamAudioOutput
from livekit.agents.voice.io import PlaybackFinishedEvent
from livekit.agents.voice.room_io import ATTRIBUTE_PUBLISH_ON_BEHALF
from livekit.plugins import silero

# from livekit.plugins.turn_detector.multilingual import MultilingualModel
from livekit.plugins import openai, azure, minimax, cartesia, deepgram, aliyun
from pydantic_core.core_schema import SerializerFunction

# uncomment to enable Krisp background voice/noise cancellation
# from livekit.plugins import noise_cancellation

logger = logging.getLogger("basic-agent")

load_dotenv()

# Fixed participant identity the avatar worker joins the room with; the agent
# routes its audio output to this identity when an avatar dispatcher is used.
AVATAR_IDENTITY = "avatar_worker"


@dataclass
class AvatarConnectionInfo:
    """Connection details POSTed to the avatar dispatcher service."""

    room_name: str
    url: str
    """LiveKit server URL"""
    token: str
    """Token for avatar worker to join"""


class MyAgent(Agent):
    """Voice agent ("Kelly") that debates the user (in Chinese) on whether AI
    can replace human labor."""

    def __init__(self) -> None:
        self._tasks = []  # Prevent garbage collection of running tasks
        super().__init__(
            instructions="Your name is Kelly. You would interact with users via voice."
            "with that in mind keep your responses concise and to the point."
            "do not use emojis, asterisks, markdown, or other special characters in your responses."
            "You are curious and friendly, and have a sense of humor."
            "你正在和用户辩论AI是否可以取代人工你的观点是AI可以取代人工"
            "you will speak chinese to the user",
        )

    async def on_enter(self):
        # when the agent is added to the session, it'll generate a reply
        # according to its instructions
        self.session.generate_reply()


def prewarm(proc: JobProcess):
    """Load the Silero VAD model once per worker process so sessions start fast."""
    proc.userdata["vad"] = silero.VAD.load()


async def launch_avatar(ctx: JobContext, avatar_dispatcher_url: str, avatar_identity: str) -> None:
    """
    Send a request to the avatar service for it to join the room

    This function should be wrapped in a avatar plugin.

    Raises:
        httpx.HTTPStatusError: if the dispatcher rejects the handshake.
    """
    # create a token for the avatar to join the room; publishing is attributed
    # to this agent via ATTRIBUTE_PUBLISH_ON_BEHALF
    token = (
        api.AccessToken()
        .with_identity(avatar_identity)
        .with_name("Avatar Runner")
        .with_grants(api.VideoGrants(room_join=True, room=ctx.room.name))
        .with_kind("agent")
        .with_attributes({ATTRIBUTE_PUBLISH_ON_BEHALF: ctx.local_participant_identity})
        .to_jwt()
    )

    logger.info(f"Sending connection info to avatar dispatcher {avatar_dispatcher_url}")
    # NOTE(review): ctx._info.url reaches into a private attribute of JobContext;
    # confirm whether a public accessor for the server URL exists.
    connection_info = AvatarConnectionInfo(room_name=ctx.room.name, url=ctx._info.url, token=token)
    async with httpx.AsyncClient() as client:
        response = await client.post(avatar_dispatcher_url, json=asdict(connection_info))
        response.raise_for_status()
    logger.info("Avatar handshake completed")


async def entrypoint(ctx: JobContext, avatar_dispatcher_url: str = None):
    """Worker entrypoint: connect, build the STT/LLM/TTS pipeline, run a session.

    Args:
        ctx: job context supplied by the LiveKit agents runtime.
        avatar_dispatcher_url: optional HTTP endpoint of an avatar dispatcher;
            when set, agent audio is streamed to the avatar worker instead of
            being published directly to the room.
    """
    # each log entry will include these fields
    ctx.log_context_fields = {
        "room": ctx.room.name,
    }
    logger.info("connecting to room")
    await ctx.connect()

    logger.info("waiting for participant")
    participant = await ctx.wait_for_participant()
    logger.info(f"starting agent for participant {participant.identity}")

    # The user may override the TTS voice via the "voice" participant attribute.
    initial_voice_id = "Chinese (Mandarin)_Male_Announcer"
    if participant.attributes.get("voice"):
        initial_voice_id = participant.attributes.get("voice")
        logger.info(f"User selected voice: {initial_voice_id}")

    session = AgentSession(
        # Speech-to-text (STT) is your agent's ears, turning the user's speech into text that the LLM can understand
        # See all available models at https://docs.livekit.io/agents/models/stt/
        # stt="deepgram/nova-3",
        # stt=azure.STT(
        #     speech_key="<redacted — load from AZURE_SPEECH_KEY>",
        #     speech_region="eastasia",
        #     language="zh-CN"
        # ),
        # stt=deepgram.STT(
        #     api_key="<redacted — load from env>",
        #     language="zh-CN",
        #     model="nova-2-general"
        # ),
        stt=aliyun.STT(model="paraformer-realtime-v2"),
        # A Large Language Model (LLM) is your agent's brain, processing user input and generating a response
        # See all available models at https://docs.livekit.io/agents/models/llm/
        # llm="openai/gpt-4.1-mini",
        llm=openai.LLM.with_deepseek(
            model='deepseek-chat'
        ),
        # Text-to-speech (TTS) is your agent's voice, turning the LLM's text into speech that the user can hear
        # See all available models as well as voice selections at https://docs.livekit.io/agents/models/tts/
        # tts="cartesia/sonic-2:9626c31c-bec5-4cca-baa8-f8ba9e84c8bc",
        # tts=minimax.TTS(
        #     model="speech-2.6-turbo",
        #     voice=initial_voice_id,
        #     # voice="Friendly_Person",
        #     # voice="Chinese (Mandarin)_Male_Announcer"
        # ),
        tts=aliyun.TTS(model="cosyvoice-v2", voice="longcheng_v2"),
        # tts=azure.TTS(
        #     speech_key="<redacted — load from AZURE_SPEECH_KEY>",
        #     speech_region="eastasia",
        #     language='zh-CN'
        # ),
        # tts = openai.TTS(
        #     model='kokoro',
        #     voice='zf_xiaoyi',
        #     base_url='http://127.0.0.1:8880/v1',
        #     api_key='not-needed',
        # ),
        # tts=cartesia.TTS(),
        # VAD and turn detection are used to determine when the user is speaking and when the agent should respond
        # See more at https://docs.livekit.io/agents/build/turns
        # turn_detection=MultilingualModel(),
        vad=ctx.proc.userdata["vad"],
        # allow the LLM to generate a response while waiting for the end of turn
        # See more at https://docs.livekit.io/agents/build/audio/#preemptive-generation
        preemptive_generation=True,
        # sometimes background noise could interrupt the agent session, these are considered false positive interruptions
        # when it's detected, you may resume the agent's speech
        resume_false_interruption=True,
        false_interruption_timeout=1.0,
    )

    # log metrics as they are emitted, and total usage after session is over
    usage_collector = metrics.UsageCollector()

    @session.on("metrics_collected")
    def _on_metrics_collected(ev: MetricsCollectedEvent):
        metrics.log_metrics(ev.metrics)
        usage_collector.collect(ev.metrics)

    async def log_usage():
        summary = usage_collector.get_summary()
        logger.info(f"Usage: {summary}")

    # shutdown callbacks are triggered when the session is over
    ctx.add_shutdown_callback(log_usage)

    # Launch avatar if avatar_dispatcher_url is provided
    if avatar_dispatcher_url:
        await launch_avatar(ctx, avatar_dispatcher_url, AVATAR_IDENTITY)
        session.output.audio = DataStreamAudioOutput(
            ctx.room,
            destination_identity=AVATAR_IDENTITY,
            # (optional) wait for the avatar to publish video track before generating a reply
            wait_remote_track=rtc.TrackKind.KIND_VIDEO,
        )

        @session.output.audio.on("playback_finished")
        def on_playback_finished(ev: PlaybackFinishedEvent) -> None:
            # the avatar should notify when the audio playback is finished
            logger.info(
                "playback_finished",
                extra={
                    "playback_position": ev.playback_position,
                    "interrupted": ev.interrupted,
                },
            )

    await session.start(
        agent=MyAgent(),
        room=ctx.room,
        room_input_options=RoomInputOptions(
            # uncomment to enable Krisp BVC noise cancellation
            # noise_cancellation=noise_cancellation.BVC(),
        ),
        room_output_options=RoomOutputOptions(transcription_enabled=True),
    )

    # --- Core: listen for room metadata changes (debate state updates) ---
    @ctx.room.on("room_metadata_changed")
    def on_metadata_changed(old_metadata: str, new_metadata: str):
        logger.info(f"收到新的比赛状态: {new_metadata} (旧状态: {old_metadata})")
        print(f"收到新的比赛状态: {new_metadata} (旧状态: {old_metadata})")


if __name__ == "__main__":
    # Parse our own --avatar-url flag, then hand the remaining argv to the
    # LiveKit CLI so its own flags keep working.
    parser = argparse.ArgumentParser()
    parser.add_argument("--avatar-url", type=str, default=None, help="Avatar dispatcher URL (e.g., http://localhost:8089/launch)")
    args, remaining_args = parser.parse_known_args()
    sys.argv = sys.argv[:1] + remaining_args

    if args.avatar_url:
        cli.run_app(WorkerOptions(entrypoint_fnc=partial(entrypoint, avatar_dispatcher_url=args.avatar_url), prewarm_fnc=prewarm))
    else:
        cli.run_app(WorkerOptions(entrypoint_fnc=entrypoint, prewarm_fnc=prewarm))

977
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -6,35 +6,37 @@
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
"lint": "next lint",
"format": "prettier --write ."
},
"dependencies": {
"@livekit/components-react": "^2.6.0",
"@livekit/components-styles": "^1.1.1",
"@radix-ui/react-dropdown-menu": "^2.0.6",
"cookies-next": "^4.1.1",
"framer-motion": "^10.16.16",
"@livekit/components-react": "^2.9.3",
"@livekit/components-styles": "^1.1.5",
"@radix-ui/react-dropdown-menu": "^2.1.2",
"cookies-next": "^4.3.0",
"framer-motion": "^10.18.0",
"js-yaml": "^4.1.0",
"livekit-client": "^2.5.1",
"livekit-server-sdk": "^2.6.1",
"livekit-client": "^2.9.5",
"livekit-server-sdk": "^2.13.0",
"lodash": "^4.17.21",
"next": "^14.0.4",
"next": "^14.2.20",
"next-plugin-preval": "^1.2.6",
"qrcode.react": "^4.0.0",
"react": "^18",
"react-dom": "^18"
"qrcode.react": "^4.1.0",
"react": "^18.3.1",
"react-dom": "^18.3.1"
},
"devDependencies": {
"@types/js-yaml": "^4.0.9",
"@types/lodash": "^4.17.0",
"@types/node": "^20.10.4",
"@types/react": "^18.2.43",
"@types/react-dom": "^18",
"autoprefixer": "^10.4.16",
"eslint": "^8",
"eslint-config-next": "14.2.15",
"postcss": "^8.4.31",
"tailwindcss": "^3.3.5",
"typescript": "^5.3.3"
"@types/lodash": "^4.17.13",
"@types/node": "^20.17.9",
"@types/react": "^18.3.14",
"@types/react-dom": "^18.3.3",
"autoprefixer": "^10.4.20",
"eslint": "^8.57.1",
"eslint-config-next": "14.2.26",
"postcss": "^8.4.49",
"tailwindcss": "^3.4.16",
"typescript": "^5.7.2",
"prettier": "^3.4.2"
}
}

View File

@@ -3,4 +3,4 @@ module.exports = {
tailwindcss: {},
autoprefixer: {},
},
}
};

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

Before

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>

Before

Width:  |  Height:  |  Size: 629 B

View File

@@ -0,0 +1,60 @@
import React from "react";
import { AttributeItem } from "@/lib/types";

interface AttributeRowProps {
  /** The attribute (id/key/value) rendered by this row. */
  attribute: AttributeItem;
  /** Called with (id, newKey) when the name input changes. */
  onKeyChange: (id: string, newKey: string) => void;
  /** Called with (id, newValue) when the value input changes. */
  onValueChange: (id: string, newValue: string) => void;
  /** When provided (and the row is not disabled), shows a remove button. */
  onRemove?: (id: string) => void;
  /** Disables both inputs and hides the remove button. */
  disabled?: boolean;
}

/**
 * A single editable key/value row of the attributes inspector.
 */
export const AttributeRow: React.FC<AttributeRowProps> = ({
  attribute,
  onKeyChange,
  onValueChange,
  onRemove,
  disabled = false,
}) => {
  return (
    <div className="flex items-center gap-2 mb-2">
      <input
        value={attribute.key}
        onChange={(e) => onKeyChange(attribute.id, e.target.value)}
        className="flex-1 min-w-0 text-gray-400 text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-1 font-mono"
        placeholder="Name"
        disabled={disabled}
      />
      <input
        value={attribute.value}
        onChange={(e) => onValueChange(attribute.id, e.target.value)}
        className="flex-1 min-w-0 text-gray-400 text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-1 font-mono"
        placeholder="Value"
        disabled={disabled}
      />
      {/* Previously the button was rendered and hidden via an inline
          style.display="none" that conflicted with the `flex` className;
          not rendering it at all when disabled is equivalent and simpler. */}
      {onRemove && !disabled && (
        <button
          onClick={() => onRemove(attribute.id)}
          className="flex-shrink-0 w-6 h-6 flex items-center justify-center text-gray-400 hover:text-white"
        >
          <svg
            xmlns="http://www.w3.org/2000/svg"
            className="h-4 w-4"
            fill="none"
            viewBox="0 0 24 24"
            stroke="currentColor"
          >
            <path
              strokeLinecap="round"
              strokeLinejoin="round"
              strokeWidth={2}
              d="M6 18L18 6M6 6l12 12"
            />
          </svg>
        </button>
      )}
    </div>
  );
};

View File

@@ -0,0 +1,268 @@
import React, { useState, useCallback, useEffect, useRef } from "react";
import { ConnectionState } from "livekit-client";
import { AttributeItem } from "@/lib/types";
import { Button } from "@/components/button/Button";
import { useLocalParticipant } from "@livekit/components-react";
import { AttributeRow } from "./AttributeRow";
interface AttributesInspectorProps {
attributes: AttributeItem[];
onAttributesChange: (attributes: AttributeItem[]) => void;
themeColor: string;
disabled?: boolean;
connectionState?: ConnectionState;
metadata?: string;
onMetadataChange?: (metadata: string) => void;
}
export const AttributesInspector: React.FC<AttributesInspectorProps> = ({
attributes,
onAttributesChange,
themeColor,
disabled = false,
connectionState,
metadata,
onMetadataChange,
}) => {
const [isExpanded, setIsExpanded] = useState(false);
const [isMetadataExpanded, setIsMetadataExpanded] = useState(false);
const [localAttributes, setLocalAttributes] =
useState<AttributeItem[]>(attributes);
const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false);
const [showSyncFlash, setShowSyncFlash] = useState(false);
const { localParticipant } = useLocalParticipant();
const timeoutRef = useRef<NodeJS.Timeout>();
const syncFlashTimeoutRef = useRef<NodeJS.Timeout>();
// Update local attributes when props change
useEffect(() => {
setLocalAttributes(attributes);
}, [attributes]);
const syncAttributesWithRoom = useCallback(() => {
if (!localParticipant || connectionState !== ConnectionState.Connected)
return;
const attributesMap = localAttributes.reduce(
(acc, attr) => {
if (attr.key && attr.key.trim() !== "") {
acc[attr.key] = attr.value;
}
return acc;
},
{} as Record<string, string>,
);
localParticipant.setAttributes(attributesMap);
setHasUnsavedChanges(false);
setShowSyncFlash(true);
if (syncFlashTimeoutRef.current) {
clearTimeout(syncFlashTimeoutRef.current);
}
syncFlashTimeoutRef.current = setTimeout(
() => setShowSyncFlash(false),
1000,
);
}, [localAttributes, localParticipant, connectionState]);
// Handle debounced sync
useEffect(() => {
if (!hasUnsavedChanges) return;
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
}
timeoutRef.current = setTimeout(() => {
if (connectionState === ConnectionState.Connected && localParticipant) {
syncAttributesWithRoom();
}
}, 2000);
return () => {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current);
}
};
}, [
hasUnsavedChanges,
syncAttributesWithRoom,
connectionState,
localParticipant,
]);
const handleKeyChange = (id: string, newKey: string) => {
const updatedAttributes = localAttributes.map((attr) =>
attr.id === id ? { ...attr, key: newKey } : attr,
);
setLocalAttributes(updatedAttributes);
onAttributesChange(updatedAttributes);
if (connectionState === ConnectionState.Connected && newKey.trim() !== "") {
setHasUnsavedChanges(true);
}
};
const handleValueChange = (id: string, newValue: string) => {
const updatedAttributes = localAttributes.map((attr) =>
attr.id === id ? { ...attr, value: newValue } : attr,
);
setLocalAttributes(updatedAttributes);
onAttributesChange(updatedAttributes);
if (connectionState === ConnectionState.Connected) {
setHasUnsavedChanges(true);
}
};
const handleRemoveAttribute = (id: string) => {
const updatedAttributes = localAttributes.filter((attr) => attr.id !== id);
setLocalAttributes(updatedAttributes);
onAttributesChange(updatedAttributes);
if (connectionState === ConnectionState.Connected) {
setHasUnsavedChanges(true);
}
};
const handleAddAttribute = () => {
const newId = `attr_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
const updatedAttributes = [
...localAttributes,
{ id: newId, key: "", value: "" },
];
setLocalAttributes(updatedAttributes);
onAttributesChange(updatedAttributes);
if (connectionState === ConnectionState.Connected) {
setHasUnsavedChanges(true);
}
};
return (
<div>
<div
className="flex items-center justify-between mb-2 cursor-pointer"
onClick={() => setIsExpanded(!isExpanded)}
>
<div className="text-sm text-gray-500">Attributes</div>
<svg
xmlns="http://www.w3.org/2000/svg"
className={`h-4 w-4 text-gray-500 transition-transform ${isExpanded ? "rotate-180" : ""}`}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M19 9l-7 7-7-7"
/>
</svg>
</div>
{isExpanded && (
<div className="border border-gray-800 rounded-sm bg-gray-900/30 p-3 mb-2">
{disabled ? (
localAttributes.length === 0 ? (
<div className="text-sm text-gray-400 font-sans">
No attributes set
</div>
) : (
localAttributes.map((attribute) => (
<AttributeRow
key={attribute.id}
attribute={attribute}
onKeyChange={handleKeyChange}
onValueChange={handleValueChange}
disabled={true}
/>
))
)
) : (
<>
{localAttributes.map((attribute) => (
<AttributeRow
key={attribute.id}
attribute={attribute}
onKeyChange={handleKeyChange}
onValueChange={handleValueChange}
onRemove={handleRemoveAttribute}
disabled={disabled}
/>
))}
<div className="flex justify-between items-center">
<Button
accentColor={themeColor}
onClick={handleAddAttribute}
className="text-xs flex items-center gap-1"
>
<svg
xmlns="http://www.w3.org/2000/svg"
className="h-3 w-3"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M12 4v16m8-8H4"
/>
</svg>
Attribute
</Button>
{showSyncFlash && (
<div className="text-xs text-gray-400 animate-fade-in-out">
Changes saved
</div>
)}
</div>
</>
)}
</div>
)}
<>
<div
className="flex items-center justify-between mb-2 cursor-pointer"
onClick={() => setIsMetadataExpanded(!isMetadataExpanded)}
>
<div className="text-sm text-gray-500">Metadata</div>
<svg
xmlns="http://www.w3.org/2000/svg"
className={`h-4 w-4 text-gray-500 transition-transform ${isMetadataExpanded ? "rotate-180" : ""}`}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M19 9l-7 7-7-7"
/>
</svg>
</div>
{isMetadataExpanded &&
(disabled || connectionState === ConnectionState.Connected ? (
<div className="border border-gray-800 rounded-sm bg-gray-900/30 px-3 py-2 mb-4 min-h-[40px] flex items-center">
{metadata ? (
<pre className="w-full text-gray-400 text-xs bg-transparent font-mono whitespace-pre-wrap break-words m-0 p-0 border-0">
{metadata}
</pre>
) : (
<div className="text-sm text-gray-400 font-sans w-full text-left">
No metadata set
</div>
)}
</div>
) : (
<textarea
value={metadata}
onChange={(e) => onMetadataChange?.(e.target.value)}
className="w-full text-gray-400 text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-2 font-mono mb-4"
placeholder="Enter metadata..."
rows={3}
/>
))}
</>
</div>
);
};

View File

@@ -1,4 +1,4 @@
import { ReactNode } from "react";
import { ReactNode, useState } from "react";
import { PlaygroundDeviceSelector } from "@/components/playground/PlaygroundDeviceSelector";
import { TrackToggle } from "@livekit/components-react";
import { Track } from "livekit-client";
@@ -6,35 +6,66 @@ import { Track } from "livekit-client";
type ConfigurationPanelItemProps = {
title: string;
children?: ReactNode;
deviceSelectorKind?: MediaDeviceKind;
source?: Track.Source;
collapsible?: boolean;
defaultCollapsed?: boolean;
};
export const ConfigurationPanelItem: React.FC<ConfigurationPanelItemProps> = ({
children,
title,
deviceSelectorKind,
source,
collapsible = false,
defaultCollapsed = false,
}) => {
const [isCollapsed, setIsCollapsed] = useState(defaultCollapsed);
return (
<div className="w-full text-gray-300 py-4 border-b border-b-gray-800 relative">
<div className="flex flex-row justify-between items-center px-4 text-xs uppercase tracking-wider">
<h3>{title}</h3>
{deviceSelectorKind && (
<div className="flex items-center gap-2">
{source && (
<span className="flex flex-row gap-2">
<TrackToggle
className="px-2 py-1 bg-gray-900 text-gray-300 border border-gray-800 rounded-sm hover:bg-gray-800"
source={
deviceSelectorKind === "audioinput"
? Track.Source.Microphone
: Track.Source.Camera
}
source={source}
/>
<PlaygroundDeviceSelector kind={deviceSelectorKind} />
{source === Track.Source.Camera && (
<PlaygroundDeviceSelector kind="videoinput" />
)}
{source === Track.Source.Microphone && (
<PlaygroundDeviceSelector kind="audioinput" />
)}
</span>
)}
{collapsible && (
<button
onClick={() => setIsCollapsed(!isCollapsed)}
className="text-gray-400 hover:text-gray-300 transition-colors"
>
<svg
className={`w-4 h-4 transform transition-transform ${!isCollapsed ? "rotate-180" : ""}`}
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M19 9l-7 7-7-7"
/>
</svg>
</button>
)}
</div>
</div>
{!isCollapsed && (
<div className="px-4 py-2 text-xs text-gray-500 leading-normal">
{children}
</div>
)}
</div>
);
};

View File

@@ -52,11 +52,5 @@ export const EditableNameValueRow: React.FC<EditableNameValueRowProps> = ({
</div>
);
}
return (
<NameValueRow
name={name}
value={value}
valueColor={valueColor}
/>
);
return <NameValueRow name={name} value={value} valueColor={valueColor} />;
};

View File

@@ -24,12 +24,15 @@ import {
useTracks,
useVoiceAssistant,
useRoomContext,
useParticipantAttributes,
} from "@livekit/components-react";
import { ConnectionState, LocalParticipant, Track } from "livekit-client";
import { QRCodeSVG } from "qrcode.react";
import { ReactNode, useCallback, useEffect, useMemo, useState } from "react";
import tailwindTheme from "../../lib/tailwindTheme.preval";
import { EditableNameValueRow } from "@/components/config/NameValueRow";
import { AttributesInspector } from "@/components/config/AttributesInspector";
import { RpcPanel } from "./RpcPanel";
export interface PlaygroundMeta {
name: string;
@@ -62,6 +65,7 @@ export default function Playground({
const [rpcMethod, setRpcMethod] = useState("");
const [rpcPayload, setRpcPayload] = useState("");
const [showRpc, setShowRpc] = useState(false);
useEffect(() => {
if (roomState === ConnectionState.Connected) {
@@ -73,24 +77,27 @@ export default function Playground({
const agentVideoTrack = tracks.find(
(trackRef) =>
trackRef.publication.kind === Track.Kind.Video &&
trackRef.participant.isAgent
trackRef.participant.isAgent,
);
const localTracks = tracks.filter(
({ participant }) => participant instanceof LocalParticipant
({ participant }) => participant instanceof LocalParticipant,
);
const localVideoTrack = localTracks.find(
({ source }) => source === Track.Source.Camera
const localCameraTrack = localTracks.find(
({ source }) => source === Track.Source.Camera,
);
const localScreenTrack = localTracks.find(
({ source }) => source === Track.Source.ScreenShare,
);
const localMicTrack = localTracks.find(
({ source }) => source === Track.Source.Microphone
({ source }) => source === Track.Source.Microphone,
);
const onDataReceived = useCallback(
(msg: any) => {
if (msg.topic === "transcription") {
const decoded = JSON.parse(
new TextDecoder("utf-8").decode(msg.payload)
new TextDecoder("utf-8").decode(msg.payload),
);
let timestamp = new Date().getTime();
if ("timestamp" in decoded && decoded.timestamp > 0) {
@@ -107,24 +114,24 @@ export default function Playground({
]);
}
},
[transcripts]
[transcripts],
);
useDataChannel(onDataReceived);
const videoTileContent = useMemo(() => {
const videoFitClassName = `object-${config.video_fit || "cover"}`;
const videoFitClassName = `object-${config.video_fit || "contain"}`;
const disconnectedContent = (
<div className="flex items-center justify-center text-gray-700 text-center w-full h-full">
No video track. Connect to get started.
No agent video track. Connect to get started.
</div>
);
const loadingContent = (
<div className="flex flex-col items-center justify-center gap-2 text-gray-700 text-center h-full w-full">
<LoadingSVG />
Waiting for video track
Waiting for agent video track
</div>
);
@@ -155,25 +162,25 @@ export default function Playground({
document.body.style.setProperty(
"--lk-theme-color",
// @ts-ignore
tailwindTheme.colors[config.settings.theme_color]["500"]
tailwindTheme.colors[config.settings.theme_color]["500"],
);
document.body.style.setProperty(
"--lk-drop-shadow",
`var(--lk-theme-color) 0px 0px 18px`
`var(--lk-theme-color) 0px 0px 18px`,
);
}, [config.settings.theme_color]);
const audioTileContent = useMemo(() => {
const disconnectedContent = (
<div className="flex flex-col items-center justify-center gap-2 text-gray-700 text-center w-full">
No audio track. Connect to get started.
No agent audio track. Connect to get started.
</div>
);
const waitingContent = (
<div className="flex flex-col items-center gap-2 text-gray-700 text-center w-full">
<LoadingSVG />
Waiting for audio track
Waiting for agent audio track
</div>
);
@@ -207,7 +214,7 @@ export default function Playground({
]);
const chatTileContent = useMemo(() => {
if (voiceAssistant.audioTrack) {
if (voiceAssistant.agent) {
return (
<TranscriptionTile
agentAudioTrack={voiceAssistant.audioTrack}
@@ -216,102 +223,63 @@ export default function Playground({
);
}
return <></>;
}, [config.settings.theme_color, voiceAssistant.audioTrack]);
}, [
config.settings.theme_color,
voiceAssistant.audioTrack,
voiceAssistant.agent,
]);
const handleRpcCall = useCallback(async () => {
if (!voiceAssistant.agent || !room) return;
if (!voiceAssistant.agent || !room) {
throw new Error("No agent or room available");
}
try {
const response = await room.localParticipant.performRpc({
destinationIdentity: voiceAssistant.agent.identity,
method: rpcMethod,
payload: rpcPayload,
});
console.log('RPC response:', response);
} catch (e) {
console.error('RPC call failed:', e);
}
return response;
}, [room, rpcMethod, rpcPayload, voiceAssistant.agent]);
const agentAttributes = useParticipantAttributes({
participant: voiceAssistant.agent,
});
const settingsTileContent = useMemo(() => {
return (
<div className="flex flex-col gap-4 h-full w-full items-start overflow-y-auto">
<div className="flex flex-col h-full w-full items-start overflow-y-auto">
{config.description && (
<ConfigurationPanelItem title="Description">
{config.description}
</ConfigurationPanelItem>
)}
<ConfigurationPanelItem title="Settings">
<div className="flex flex-col gap-4">
<ConfigurationPanelItem title="Room">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Room"
value={roomState === ConnectionState.Connected ? name : config.settings.room_name}
name="Room name"
value={
roomState === ConnectionState.Connected
? name
: config.settings.room_name
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.room_name = value;
setUserSettings(newSettings);
}}
placeholder="Enter room name"
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<EditableNameValueRow
name="Participant"
value={roomState === ConnectionState.Connected ?
(localParticipant?.identity || '') :
(config.settings.participant_name || '')}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.participant_name = value;
setUserSettings(newSettings);
}}
placeholder="Enter participant id"
editable={roomState !== ConnectionState.Connected}
/>
</div>
<div className="flex flex-col gap-2 mt-4">
<div className="text-xs text-gray-500 mt-2">RPC Method</div>
<input
type="text"
value={rpcMethod}
onChange={(e) => setRpcMethod(e.target.value)}
className="w-full text-white text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-2"
placeholder="RPC method name"
/>
<div className="text-xs text-gray-500 mt-2">RPC Payload</div>
<textarea
value={rpcPayload}
onChange={(e) => setRpcPayload(e.target.value)}
className="w-full text-white text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-2"
placeholder="RPC payload"
rows={2}
/>
<button
onClick={handleRpcCall}
disabled={!voiceAssistant.agent || !rpcMethod}
className={`mt-2 px-2 py-1 rounded-sm text-xs
${voiceAssistant.agent && rpcMethod
? `bg-${config.settings.theme_color}-500 hover:bg-${config.settings.theme_color}-600`
: 'bg-gray-700 cursor-not-allowed'
} text-white`}
>
Perform RPC Call
</button>
</div>
</ConfigurationPanelItem>
<ConfigurationPanelItem title="Status">
<div className="flex flex-col gap-2">
<NameValueRow
name="Room connected"
name="Status"
value={
roomState === ConnectionState.Connecting ? (
<LoadingSVG diameter={16} strokeWidth={2} />
) : (
roomState.toUpperCase()
roomState.charAt(0).toUpperCase() + roomState.slice(1)
)
}
valueColor={
@@ -320,15 +288,36 @@ export default function Playground({
: "gray-500"
}
/>
</div>
</ConfigurationPanelItem>
<ConfigurationPanelItem title="Agent">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Agent name"
value={
roomState === ConnectionState.Connected
? config.settings.agent_name || "None"
: config.settings.agent_name || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.agent_name = value;
setUserSettings(newSettings);
}}
placeholder="None"
editable={roomState !== ConnectionState.Connected}
/>
<NameValueRow
name="Agent connected"
name="Identity"
value={
voiceAssistant.agent ? (
"TRUE"
voiceAssistant.agent.identity
) : roomState === ConnectionState.Connected ? (
<LoadingSVG diameter={12} strokeWidth={2} />
) : (
"FALSE"
"No agent connected"
)
}
valueColor={
@@ -337,17 +326,126 @@ export default function Playground({
: "gray-500"
}
/>
{roomState === ConnectionState.Connected &&
voiceAssistant.agent && (
<AttributesInspector
attributes={Object.entries(
agentAttributes.attributes || {},
).map(([key, value], index) => ({
id: `agent-attr-${index}`,
key,
value: String(value),
}))}
onAttributesChange={() => {}}
themeColor={config.settings.theme_color}
disabled={true}
/>
)}
<p className="text-xs text-gray-500 text-right">
Set an agent name to use{" "}
<a
href="https://docs.livekit.io/agents/worker/dispatch#explicit"
target="_blank"
rel="noopener noreferrer"
className="text-gray-500 hover:text-gray-300 underline"
>
explicit dispatch
</a>
.
</p>
</div>
</ConfigurationPanelItem>
{localVideoTrack && (
<ConfigurationPanelItem title="User">
<div className="flex flex-col gap-2">
<EditableNameValueRow
name="Name"
value={
roomState === ConnectionState.Connected
? localParticipant?.name || ""
: config.settings.participant_name || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.participant_name = value;
setUserSettings(newSettings);
}}
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<EditableNameValueRow
name="Identity"
value={
roomState === ConnectionState.Connected
? localParticipant?.identity || ""
: config.settings.participant_id || ""
}
valueColor={`${config.settings.theme_color}-500`}
onValueChange={(value) => {
const newSettings = { ...config.settings };
newSettings.participant_id = value;
setUserSettings(newSettings);
}}
placeholder="Auto"
editable={roomState !== ConnectionState.Connected}
/>
<AttributesInspector
attributes={config.settings.attributes || []}
onAttributesChange={(newAttributes) => {
const newSettings = { ...config.settings };
newSettings.attributes = newAttributes;
setUserSettings(newSettings);
}}
metadata={config.settings.metadata}
onMetadataChange={(metadata) => {
const newSettings = { ...config.settings };
newSettings.metadata = metadata;
setUserSettings(newSettings);
}}
themeColor={config.settings.theme_color}
disabled={false}
connectionState={roomState}
/>
</div>
</ConfigurationPanelItem>
{roomState === ConnectionState.Connected &&
config.settings.inputs.screen && (
<ConfigurationPanelItem
title="Camera"
deviceSelectorKind="videoinput"
title="Screen"
source={Track.Source.ScreenShare}
>
{localScreenTrack ? (
<div className="relative">
<VideoTrack
className="rounded-sm border border-gray-800 opacity-70 w-full"
trackRef={localVideoTrack}
trackRef={localScreenTrack}
/>
</div>
) : (
<div className="flex items-center justify-center text-gray-700 text-center w-full h-full">
Press the button above to share your screen.
</div>
)}
</ConfigurationPanelItem>
)}
{roomState === ConnectionState.Connected && voiceAssistant.agent && (
<RpcPanel
config={config}
rpcMethod={rpcMethod}
rpcPayload={rpcPayload}
setRpcMethod={setRpcMethod}
setRpcPayload={setRpcPayload}
handleRpcCall={handleRpcCall}
/>
)}
{localCameraTrack && (
<ConfigurationPanelItem title="Camera" source={Track.Source.Camera}>
<div className="relative">
<VideoTrack
className="rounded-sm border border-gray-800 opacity-70 w-full"
trackRef={localCameraTrack}
/>
</div>
</ConfigurationPanelItem>
@@ -355,7 +453,7 @@ export default function Playground({
{localMicTrack && (
<ConfigurationPanelItem
title="Microphone"
deviceSelectorKind="audioinput"
source={Track.Source.Microphone}
>
<AudioInputTile trackRef={localMicTrack} />
</ConfigurationPanelItem>
@@ -389,7 +487,8 @@ export default function Playground({
localParticipant,
name,
roomState,
localVideoTrack,
localCameraTrack,
localScreenTrack,
localMicTrack,
themeColors,
setUserSettings,
@@ -397,6 +496,8 @@ export default function Playground({
rpcMethod,
rpcPayload,
handleRpcCall,
showRpc,
setShowRpc,
]);
let mobileTabs: PlaygroundTab[] = [];
@@ -482,7 +583,7 @@ export default function Playground({
>
{config.settings.outputs.video && (
<PlaygroundTile
title="Video"
title="Agent Video"
className="w-full h-full grow"
childrenClassName="justify-center"
>
@@ -491,7 +592,7 @@ export default function Playground({
)}
{config.settings.outputs.audio && (
<PlaygroundTile
title="Audio"
title="Agent Audio"
className="w-full h-full grow"
childrenClassName="justify-center"
>

View File

@@ -66,7 +66,7 @@ export const PlaygroundTabbedTile: React.FC<PlaygroundTabbedTileProps> = ({
}) => {
const contentPadding = 4;
const [activeTab, setActiveTab] = useState(initialTab);
if(activeTab >= tabs.length) {
if (activeTab >= tabs.length) {
return null;
}
return (

View File

@@ -0,0 +1,107 @@
import { ConfigurationPanelItem } from "@/components/config/ConfigurationPanelItem";
import { useState } from "react";
import { LoadingSVG } from "@/components/button/LoadingSVG";
import { Button } from "@/components/button/Button";

// Props are fully controlled by the parent: the method/payload strings and
// their setters live upstream, and handleRpcCall performs the actual request.
interface RpcPanelProps {
  config: any; // app config; only settings.theme_color is read here
  rpcMethod: string; // RPC method name (controlled input value)
  rpcPayload: string; // raw payload string (controlled textarea value)
  setRpcMethod: (method: string) => void;
  setRpcPayload: (payload: string) => void;
  handleRpcCall: () => Promise<any>; // resolves with the response, rejects on failure
}

/**
 * Collapsible "RPC" configuration panel.
 *
 * Lets the user type a method name and payload, trigger the call supplied via
 * `handleRpcCall`, and see either the successful response or the error message
 * rendered inline below the button.
 */
export function RpcPanel({
  config,
  rpcMethod,
  rpcPayload,
  setRpcMethod,
  setRpcPayload,
  handleRpcCall,
}: RpcPanelProps) {
  // Outcome of the most recent call; null until a call completes (and reset
  // at the start of each new call so stale results are not shown).
  const [rpcResult, setRpcResult] = useState<{
    success: boolean;
    data: any;
  } | null>(null);
  const [isLoading, setIsLoading] = useState(false);

  // Wraps the parent-provided call with loading state and error capture so a
  // rejected promise is shown in the panel instead of propagating.
  const handleCall = async () => {
    setIsLoading(true);
    setRpcResult(null);
    try {
      const result = await handleRpcCall();
      setRpcResult({ success: true, data: result });
    } catch (error) {
      setRpcResult({
        success: false,
        // Preserve Error messages; stringify anything else that was thrown.
        data: error instanceof Error ? error.message : String(error),
      });
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <ConfigurationPanelItem
      title="RPC"
      collapsible={true}
      defaultCollapsed={true}
    >
      <div className="flex flex-col gap-2">
        <div className="text-xs text-gray-500 mt-2">Method Name</div>
        <input
          type="text"
          value={rpcMethod}
          onChange={(e) => setRpcMethod(e.target.value)}
          className="w-full text-white text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-2"
          placeholder="my_method"
        />
        <div className="text-xs text-gray-500 mt-2">Payload</div>
        <textarea
          value={rpcPayload}
          onChange={(e) => setRpcPayload(e.target.value)}
          className="w-full text-white text-sm bg-transparent border border-gray-800 rounded-sm px-3 py-2"
          placeholder='{"my": "payload"}'
          rows={2}
        />
        {/* Disabled until a method name is entered, and while a call is in flight. */}
        <Button
          accentColor={config.settings.theme_color}
          onClick={handleCall}
          disabled={!rpcMethod || isLoading}
          className="mt-2 text-xs flex items-center justify-center gap-2"
        >
          {isLoading ? (
            <>
              <LoadingSVG diameter={12} strokeWidth={2} />
              Performing RPC...
            </>
          ) : (
            "Perform RPC"
          )}
        </Button>
        {rpcResult && (
          <>
            <div className="text-xs text-gray-500 mt-2">
              {rpcResult.success ? "Result" : "Error"}
            </div>
            {/* Green border/text for success, red for failure. */}
            <div
              className={`w-full text-sm bg-transparent border rounded-sm px-3 py-2 ${
                rpcResult.success
                  ? "border-green-500 text-green-500"
                  : "border-red-500 text-red-500"
              }`}
            >
              {/* Pretty-print object responses; render everything else as text. */}
              {typeof rpcResult.data === "object"
                ? JSON.stringify(rpcResult.data, null, 2)
                : String(rpcResult.data)}
            </div>
          </>
        )}
      </div>
    </ConfigurationPanelItem>
  );
}

View File

@@ -2,7 +2,7 @@ import * as DropdownMenu from "@radix-ui/react-dropdown-menu";
import { CheckIcon, ChevronIcon } from "./icons";
import { useConfig } from "@/hooks/useConfig";
type SettingType = "inputs" | "outputs" | "chat" | "theme_color"
type SettingType = "inputs" | "outputs" | "chat" | "theme_color";
type SettingValue = {
title: string;
@@ -47,21 +47,27 @@ const settingsDropdown: SettingValue[] = [
type: "inputs",
key: "mic",
},
{
title: "Allow screenshare",
type: "inputs",
key: "screen",
},
];
export const SettingsDropdown = () => {
const {config, setUserSettings} = useConfig();
const { config, setUserSettings } = useConfig();
const isEnabled = (setting: SettingValue) => {
if (setting.type === "separator" || setting.type === "theme_color") return false;
if (setting.type === "separator" || setting.type === "theme_color")
return false;
if (setting.type === "chat") {
return config.settings[setting.type];
}
if(setting.type === "inputs") {
const key = setting.key as "camera" | "mic";
if (setting.type === "inputs") {
const key = setting.key as "camera" | "mic" | "screen";
return config.settings.inputs[key];
} else if(setting.type === "outputs") {
} else if (setting.type === "outputs") {
const key = setting.key as "video" | "audio";
return config.settings.outputs[key];
}
@@ -72,13 +78,13 @@ export const SettingsDropdown = () => {
const toggleSetting = (setting: SettingValue) => {
if (setting.type === "separator" || setting.type === "theme_color") return;
const newValue = !isEnabled(setting);
const newSettings = {...config.settings}
const newSettings = { ...config.settings };
if(setting.type === "chat") {
if (setting.type === "chat") {
newSettings.chat = newValue;
} else if(setting.type === "inputs") {
newSettings.inputs[setting.key as "camera" | "mic"] = newValue;
} else if(setting.type === "outputs") {
} else if (setting.type === "inputs") {
newSettings.inputs[setting.key as "camera" | "mic" | "screen"] = newValue;
} else if (setting.type === "outputs") {
newSettings.outputs[setting.key as "video" | "audio"] = newValue;
}
setUserSettings(newSettings);
@@ -86,11 +92,9 @@ export const SettingsDropdown = () => {
return (
<DropdownMenu.Root modal={false}>
<DropdownMenu.Trigger className="group inline-flex max-h-12 items-center gap-1 rounded-md hover:bg-gray-800 bg-gray-900 border-gray-800 p-1 pr-2 text-gray-100">
<button className="my-auto text-sm flex gap-1 pl-2 py-1 h-full items-center">
<DropdownMenu.Trigger className="group inline-flex max-h-12 items-center gap-1 rounded-md hover:bg-gray-800 bg-gray-900 border-gray-800 p-1 pr-2 text-gray-100 my-auto text-sm flex gap-1 pl-2 py-1 h-full items-center">
Settings
<ChevronIcon />
</button>
</DropdownMenu.Trigger>
<DropdownMenu.Portal>
<DropdownMenu.Content

View File

@@ -1,29 +1,28 @@
"use client"
"use client";
import React, { createContext, useState } from "react";
import { ToastType } from "./PlaygroundToast";
type ToastProviderData = {
setToastMessage: (
message: { message: string; type: ToastType } | null
message: { message: string; type: ToastType } | null,
) => void;
toastMessage: { message: string; type: ToastType } | null;
};
const ToastContext = createContext<ToastProviderData | undefined>(undefined);
export const ToastProvider = ({
children,
}: {
children: React.ReactNode;
}) => {
const [toastMessage, setToastMessage] = useState<{message: string, type: ToastType} | null>(null);
export const ToastProvider = ({ children }: { children: React.ReactNode }) => {
const [toastMessage, setToastMessage] = useState<{
message: string;
type: ToastType;
} | null>(null);
return (
<ToastContext.Provider
value={{
toastMessage,
setToastMessage
setToastMessage,
}}
>
{children}
@@ -37,4 +36,4 @@ export const useToast = () => {
throw new Error("useToast must be used within a ToastProvider");
}
return context;
}
};

View File

@@ -1,5 +1,6 @@
"use client";
import { AttributeItem } from "@/lib/types";
import { getCookie, setCookie } from "cookies-next";
import jsYaml from "js-yaml";
import { useRouter } from "next/navigation";
@@ -26,6 +27,7 @@ export type UserSettings = {
chat: boolean;
inputs: {
camera: boolean;
screen: boolean;
mic: boolean;
};
outputs: {
@@ -35,20 +37,25 @@ export type UserSettings = {
ws_url: string;
token: string;
room_name: string;
participant_id: string;
participant_name: string;
agent_name?: string;
metadata?: string;
attributes?: AttributeItem[];
};
// Fallback if NEXT_PUBLIC_APP_CONFIG is not set
const defaultConfig: AppConfig = {
title: "LiveKit Agents Playground",
description: "A playground for testing LiveKit Agents",
video_fit: "cover",
description: "A virtual workbench for testing multimodal AI agents.",
video_fit: "contain",
settings: {
editable: true,
theme_color: "cyan",
chat: true,
inputs: {
camera: true,
screen: true,
mic: true,
},
outputs: {
@@ -58,7 +65,10 @@ const defaultConfig: AppConfig = {
ws_url: "",
token: "",
room_name: "",
participant_id: "",
participant_name: "",
metadata: "",
attributes: [],
},
show_qr: false,
};
@@ -68,7 +78,7 @@ const useAppConfig = (): AppConfig => {
if (process.env.NEXT_PUBLIC_APP_CONFIG) {
try {
const parsedConfig = jsYaml.load(
process.env.NEXT_PUBLIC_APP_CONFIG
process.env.NEXT_PUBLIC_APP_CONFIG,
) as AppConfig;
if (parsedConfig.settings === undefined) {
parsedConfig.settings = defaultConfig.settings;
@@ -96,7 +106,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
const appConfig = useAppConfig();
const router = useRouter();
const [localColorOverride, setLocalColorOverride] = useState<string | null>(
null
null,
);
const getSettingsFromUrl = useCallback(() => {
@@ -117,6 +127,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
theme_color: params.get("theme_color"),
inputs: {
camera: params.get("cam") === "1",
screen: params.get("screen") === "1",
mic: params.get("mic") === "1",
},
outputs: {
@@ -127,6 +138,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
ws_url: "",
token: "",
room_name: "",
participant_id: "",
participant_name: "",
} as UserSettings;
}, [appConfig]);
@@ -148,6 +160,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
const obj = new URLSearchParams({
cam: boolToString(us.inputs.camera),
mic: boolToString(us.inputs.mic),
screen: boolToString(us.inputs.screen),
video: boolToString(us.outputs.video),
audio: boolToString(us.outputs.audio),
chat: boolToString(us.chat),
@@ -156,7 +169,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
// Note: We don't set ws_url and token to the URL on purpose
router.replace("/#" + obj.toString());
},
[router]
[router],
);
const setCookieSettings = useCallback((us: UserSettings) => {
@@ -216,7 +229,7 @@ export const ConfigProvider = ({ children }: { children: React.ReactNode }) => {
};
});
},
[appConfig, setCookieSettings, setUrlSettings]
[appConfig, setCookieSettings, setUrlSettings],
);
const [config, _setConfig] = useState<AppConfig>(getConfig());

View File

@@ -1,4 +1,4 @@
"use client"
"use client";
import { useCloud } from "@/cloud/useCloud";
import React, { createContext, useState } from "react";
@@ -6,7 +6,7 @@ import { useCallback } from "react";
import { useConfig } from "./useConfig";
import { useToast } from "@/components/toast/ToasterProvider";
export type ConnectionMode = "cloud" | "manual" | "env"
export type ConnectionMode = "cloud" | "manual" | "env";
type TokenGeneratorData = {
shouldConnect: boolean;
@@ -17,7 +17,9 @@ type TokenGeneratorData = {
connect: (mode: ConnectionMode) => Promise<void>;
};
const ConnectionContext = createContext<TokenGeneratorData | undefined>(undefined);
const ConnectionContext = createContext<TokenGeneratorData | undefined>(
undefined,
);
export const ConnectionProvider = ({
children,
@@ -54,16 +56,44 @@ export const ConnectionProvider = ({
throw new Error("NEXT_PUBLIC_LIVEKIT_URL is not set");
}
url = process.env.NEXT_PUBLIC_LIVEKIT_URL;
const params = new URLSearchParams();
const body: Record<string, any> = {};
if (config.settings.room_name) {
params.append('roomName', config.settings.room_name);
body.roomName = config.settings.room_name;
}
if (config.settings.participant_id) {
body.participantId = config.settings.participant_id;
}
if (config.settings.participant_name) {
params.append('participantName', config.settings.participant_name);
body.participantName = config.settings.participant_name;
}
const { accessToken } = await fetch(`/api/token?${params}`).then((res) =>
res.json()
if (config.settings.agent_name) {
body.agentName = config.settings.agent_name;
}
if (config.settings.metadata) {
body.metadata = config.settings.metadata;
}
const attributesArray = Array.isArray(config.settings.attributes)
? config.settings.attributes
: [];
if (attributesArray?.length) {
const attributes = attributesArray.reduce(
(acc, attr) => {
if (attr.key) {
acc[attr.key] = attr.value;
}
return acc;
},
{} as Record<string, string>,
);
body.attributes = attributes;
}
const { accessToken } = await fetch(`/api/token`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
}).then((res) => res.json());
token = accessToken;
} else {
token = config.settings.token;
@@ -77,9 +107,13 @@ export const ConnectionProvider = ({
config.settings.ws_url,
config.settings.room_name,
config.settings.participant_name,
config.settings.agent_name,
config.settings.participant_id,
config.settings.metadata,
config.settings.attributes,
generateToken,
setToastMessage,
]
],
);
const disconnect = useCallback(async () => {
@@ -108,4 +142,4 @@ export const useConnection = () => {
throw new Error("useConnection must be used within a ConnectionProvider");
}
return context;
}
};

View File

@@ -62,7 +62,7 @@ export const useMultibandTrackVolume = (
track?: Track,
bands: number = 5,
loPass: number = 100,
hiPass: number = 600
hiPass: number = 600,
) => {
const [frequencyBands, setFrequencyBands] = useState<Float32Array[]>([]);
@@ -93,7 +93,7 @@ export const useMultibandTrackVolume = (
const chunks: Float32Array[] = [];
for (let i = 0; i < bands; i++) {
chunks.push(
normalizedFrequencies.slice(i * chunkSize, (i + 1) * chunkSize)
normalizedFrequencies.slice(i * chunkSize, (i + 1) * chunkSize),
);
}

View File

@@ -14,3 +14,9 @@ export interface TokenResult {
identity: string;
accessToken: string;
}
// A single key/value participant-attribute row as edited in the attributes
// inspector UI before being sent with the token request.
export interface AttributeItem {
  id: string; // unique identifier for the row (e.g. for list rendering)
  key: string; // attribute name; rows with an empty key are skipped when sent
  value: string;
}

View File

@@ -2,46 +2,92 @@ import { NextApiRequest, NextApiResponse } from "next";
import { generateRandomAlphanumeric } from "@/lib/util";
import { AccessToken } from "livekit-server-sdk";
import { RoomAgentDispatch, RoomConfiguration } from "@livekit/protocol";
import type { AccessTokenOptions, VideoGrant } from "livekit-server-sdk";
import { TokenResult } from "../../lib/types";
const apiKey = process.env.LIVEKIT_API_KEY;
const apiSecret = process.env.LIVEKIT_API_SECRET;
const createToken = (userInfo: AccessTokenOptions, grant: VideoGrant) => {
const createToken = (
userInfo: AccessTokenOptions,
grant: VideoGrant,
agentName?: string,
) => {
const at = new AccessToken(apiKey, apiSecret, userInfo);
at.addGrant(grant);
if (agentName) {
at.roomConfig = new RoomConfiguration({
agents: [
new RoomAgentDispatch({
agentName: agentName,
metadata: '{"user_id": "12345"}',
}),
],
});
}
return at.toJwt();
};
export default async function handleToken(
req: NextApiRequest,
res: NextApiResponse
res: NextApiResponse,
) {
try {
if (req.method !== "POST") {
res.setHeader("Allow", "POST");
res.status(405).end("Method Not Allowed");
return;
}
if (!apiKey || !apiSecret) {
res.statusMessage = "Environment variables aren't set up correctly";
res.status(500).end();
return;
}
const {
roomName: roomNameFromBody,
participantName: participantNameFromBody,
participantId: participantIdFromBody,
metadata: metadataFromBody,
attributes: attributesFromBody,
agentName: agentNameFromBody,
} = req.body;
// Get room name from query params or generate random one
const roomName = req.query.roomName as string ||
const roomName =
(roomNameFromBody as string) ||
`room-${generateRandomAlphanumeric(4)}-${generateRandomAlphanumeric(4)}`;
// Get participant name from query params or generate random one
const identity = req.query.participantName as string ||
const identity =
(participantIdFromBody as string) ||
`identity-${generateRandomAlphanumeric(4)}`;
// Get agent name from query params or use none (automatic dispatch)
const agentName = (agentNameFromBody as string) || undefined;
// Get metadata and attributes from query params
const metadata = metadataFromBody as string | undefined;
const attributesStr = attributesFromBody as string | undefined;
const attributes = attributesStr || {};
const participantName = participantNameFromBody || identity;
const grant: VideoGrant = {
room: roomName,
roomJoin: true,
canPublish: true,
canPublishData: true,
canSubscribe: true,
canUpdateOwnMetadata: true,
};
const token = await createToken({ identity }, grant);
const token = await createToken(
{ identity, metadata, attributes, name: participantName },
grant,
agentName,
);
const result: TokenResult = {
identity,
accessToken: token,

View File

@@ -12,7 +12,11 @@ import { PlaygroundConnect } from "@/components/PlaygroundConnect";
import Playground from "@/components/playground/Playground";
import { PlaygroundToast, ToastType } from "@/components/toast/PlaygroundToast";
import { ConfigProvider, useConfig } from "@/hooks/useConfig";
import { ConnectionMode, ConnectionProvider, useConnection } from "@/hooks/useConnection";
import {
ConnectionMode,
ConnectionProvider,
useConnection,
} from "@/hooks/useConnection";
import { useMemo } from "react";
import { ToastProvider, useToast } from "@/components/toast/ToasterProvider";
@@ -45,25 +49,25 @@ export function HomeInner() {
const { shouldConnect, wsUrl, token, mode, connect, disconnect } =
useConnection();
const {config} = useConfig();
const { config } = useConfig();
const { toastMessage, setToastMessage } = useToast();
const handleConnect = useCallback(
async (c: boolean, mode: ConnectionMode) => {
c ? connect(mode) : disconnect();
},
[connect, disconnect]
[connect, disconnect],
);
const showPG = useMemo(() => {
if (process.env.NEXT_PUBLIC_LIVEKIT_URL) {
return true;
}
if(wsUrl) {
if (wsUrl) {
return true;
}
return false;
}, [wsUrl])
}, [wsUrl]);
return (
<>

View File

@@ -17,10 +17,10 @@ export function TranscriptionTile({
agentAudioTrack,
accentColor,
}: {
agentAudioTrack: TrackReferenceOrPlaceholder;
agentAudioTrack?: TrackReferenceOrPlaceholder;
accentColor: string;
}) {
const agentMessages = useTrackTranscription(agentAudioTrack);
const agentMessages = useTrackTranscription(agentAudioTrack || undefined);
const localParticipant = useLocalParticipant();
const localMessages = useTrackTranscription({
publication: localParticipant.microphoneTrack,
@@ -29,38 +29,42 @@ export function TranscriptionTile({
});
const [transcripts, setTranscripts] = useState<Map<string, ChatMessageType>>(
new Map()
new Map(),
);
const [messages, setMessages] = useState<ChatMessageType[]>([]);
const { chatMessages, send: sendChat } = useChat();
// store transcripts
useEffect(() => {
if (agentAudioTrack) {
agentMessages.segments.forEach((s) =>
transcripts.set(
s.id,
segmentToChatMessage(
s,
transcripts.get(s.id),
agentAudioTrack.participant
)
)
agentAudioTrack.participant,
),
),
);
}
localMessages.segments.forEach((s) =>
transcripts.set(
s.id,
segmentToChatMessage(
s,
transcripts.get(s.id),
localParticipant.localParticipant
)
)
localParticipant.localParticipant,
),
),
);
const allMessages = Array.from(transcripts.values());
for (const msg of chatMessages) {
const isAgent =
msg.from?.identity === agentAudioTrack.participant?.identity;
const isAgent = agentAudioTrack
? msg.from?.identity === agentAudioTrack.participant?.identity
: msg.from?.identity !== localParticipant.localParticipant.identity;
const isSelf =
msg.from?.identity === localParticipant.localParticipant.identity;
let name = msg.from?.name;
@@ -86,9 +90,10 @@ export function TranscriptionTile({
transcripts,
chatMessages,
localParticipant.localParticipant,
agentAudioTrack.participant,
agentAudioTrack?.participant,
agentMessages.segments,
localMessages.segments,
agentAudioTrack,
]);
return (
@@ -99,7 +104,7 @@ export function TranscriptionTile({
function segmentToChatMessage(
s: TranscriptionSegment,
existingMessage: ChatMessageType | undefined,
participant: Participant
participant: Participant,
): ChatMessageType {
const msg: ChatMessageType = {
message: s.final ? s.text : `${s.text} ...`,

View File

@@ -1,9 +1,42 @@
/** @type {import('tailwindcss').Config} */
const colors = require('tailwindcss/colors')
const shades = ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'];
const colorList = ['gray', 'green', 'cyan', 'amber', 'violet', 'blue', 'rose', 'pink', 'teal', "red"];
const uiElements = ['bg', 'selection:bg', 'border', 'text', 'hover:bg', 'hover:border', 'hover:text', 'ring', 'focus:ring'];
const colors = require("tailwindcss/colors");
const shades = [
"50",
"100",
"200",
"300",
"400",
"500",
"600",
"700",
"800",
"900",
"950",
];
const colorList = [
"gray",
"green",
"cyan",
"amber",
"violet",
"blue",
"rose",
"pink",
"teal",
"red",
];
const uiElements = [
"bg",
"selection:bg",
"border",
"text",
"hover:bg",
"hover:border",
"hover:text",
"ring",
"focus:ring",
];
const customColors = {
cyan: colors.cyan,
green: colors.green,
@@ -32,34 +65,30 @@ for (const [name, color] of Object.entries(customColors)) {
}
const safelist = [
'bg-black',
'bg-white',
'transparent',
'object-cover',
'object-contain',
"bg-black",
"bg-white",
"transparent",
"object-cover",
"object-contain",
...shadowNames,
...textShadowNames,
...shades.flatMap(shade => [
...colorList.flatMap(color => [
...uiElements.flatMap(element => [
`${element}-${color}-${shade}`,
]),
...shades.flatMap((shade) => [
...colorList.flatMap((color) => [
...uiElements.flatMap((element) => [`${element}-${color}-${shade}`]),
]),
]),
];
module.exports = {
content: [
"./src/**/*.{js,ts,jsx,tsx,mdx}",
],
content: ["./src/**/*.{js,ts,jsx,tsx,mdx}"],
theme: {
colors: {
transparent: 'transparent',
current: 'currentColor',
transparent: "transparent",
current: "currentColor",
black: colors.black,
white: colors.white,
gray: colors.neutral,
...customColors
...customColors,
},
extend: {
dropShadow: {
@@ -67,8 +96,19 @@ module.exports = {
},
boxShadow: {
...customShadows,
}
}
},
keyframes: {
"fade-in-out": {
"0%": { opacity: "0" },
"20%": { opacity: "1" },
"80%": { opacity: "1" },
"100%": { opacity: "0" },
},
},
animation: {
"fade-in-out": "fade-in-out 1s ease-in-out",
},
},
},
plugins: [],
safelist,