Compare commits

1 commit

Author: Xin Wang
SHA1: 9195957753
Message: Merge branch 'engine-v3'
Date: 2026-03-11 11:42:29 +08:00
16 changed files with 267 additions and 1786 deletions

View File

@@ -302,8 +302,8 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> tuple[Dict[s
if config_mode == "dify":
metadata["services"]["llm"] = {
"provider": "dify",
"model": "dify",
"provider": "openai",
"model": "",
"apiKey": assistant.api_key,
"baseUrl": assistant.api_url,
}
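After this hunk, an assistant saved in dify config mode surfaces an OpenAI-style LLM entry with an empty model instead of the removed dify provider. A minimal TypeScript sketch of the resulting metadata block, with illustrative key and URL values taken from the deleted test below:

// Shape of sessionStartMetadata.services.llm for a dify-mode assistant after
// this change; apiKey/baseUrl still pass through from the Assistant record.
const llm = {
  provider: 'openai',
  model: '',                          // no longer pinned to 'dify'
  apiKey: 'dify-key',                 // assistant.api_key (illustrative)
  baseUrl: 'https://api.dify.ai/v1',  // assistant.api_url (illustrative)
};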

View File

@@ -438,19 +438,3 @@ class TestAssistantAPI:
metadata = runtime_resp.json()["sessionStartMetadata"]
assert metadata["services"]["llm"]["provider"] == "fastgpt"
assert metadata["services"]["llm"]["appId"] == "app-fastgpt-123"
-    def test_dify_runtime_config_uses_dify_provider(self, client, sample_assistant_data):
-        sample_assistant_data.update({
-            "configMode": "dify",
-            "apiUrl": "https://api.dify.ai/v1",
-            "apiKey": "dify-key",
-        })
-        assistant_resp = client.post("/api/assistants", json=sample_assistant_data)
-        assert assistant_resp.status_code == 200
-        assistant_id = assistant_resp.json()["id"]
-        runtime_resp = client.get(f"/api/assistants/{assistant_id}/runtime-config")
-        assert runtime_resp.status_code == 200
-        metadata = runtime_resp.json()["sessionStartMetadata"]
-        assert metadata["services"]["llm"]["provider"] == "dify"
-        assert metadata["services"]["llm"]["model"] == "dify"

View File

@@ -28,7 +28,7 @@ from providers.tts.volcengine import VolcengineTTSService
_OPENAI_COMPATIBLE_PROVIDERS = {"openai_compatible", "openai-compatible", "siliconflow"}
_DASHSCOPE_PROVIDERS = {"dashscope"}
_VOLCENGINE_PROVIDERS = {"volcengine"}
-_SUPPORTED_LLM_PROVIDERS = {"openai", "dify", "fastgpt", *_OPENAI_COMPATIBLE_PROVIDERS}
+_SUPPORTED_LLM_PROVIDERS = {"openai", "fastgpt", *_OPENAI_COMPATIBLE_PROVIDERS}
class DefaultRealtimeServiceFactory(RealtimeServiceFactory):
@@ -58,16 +58,6 @@ class DefaultRealtimeServiceFactory(RealtimeServiceFactory):
def create_llm_service(self, spec: LLMServiceSpec) -> LLMPort:
provider = self._normalize_provider(spec.provider)
-        if provider == "dify" and spec.api_key and spec.base_url:
-            from providers.llm.dify import DifyLLMService
-            return DifyLLMService(
-                api_key=spec.api_key,
-                base_url=spec.base_url,
-                model=spec.model,
-                system_prompt=spec.system_prompt,
-            )
if provider == "fastgpt" and spec.api_key and spec.base_url:
from providers.llm.fastgpt import FastGPTLLMService

View File

@@ -1,6 +1,5 @@
"""LLM providers."""
-from providers.llm.dify import DifyLLMService
from providers.llm.openai import MockLLMService, OpenAILLMService
try: # pragma: no cover - import depends on optional sibling SDK
@@ -9,7 +8,6 @@ except Exception: # pragma: no cover - provider remains lazily available via fa
FastGPTLLMService = None # type: ignore[assignment]
__all__ = [
"DifyLLMService",
"FastGPTLLMService",
"MockLLMService",
"OpenAILLMService",

View File

@@ -1,226 +0,0 @@
"""Dify-backed LLM provider."""
from __future__ import annotations
import asyncio
import json
import uuid
from typing import Any, AsyncIterator, Dict, List, Optional
import aiohttp
from loguru import logger
from providers.common.base import BaseLLMService, LLMMessage, LLMStreamEvent, ServiceState
class DifyLLMService(BaseLLMService):
"""LLM provider that delegates chat orchestration to Dify Service API."""
def __init__(
self,
*,
api_key: str,
base_url: str,
model: str = "dify",
system_prompt: Optional[str] = None,
):
super().__init__(model=model or "dify")
self.api_key = api_key
self.base_url = str(base_url or "").rstrip("/")
self.system_prompt = system_prompt or ""
self._session: Optional[aiohttp.ClientSession] = None
self._cancel_event = asyncio.Event()
self._conversation_id: Optional[str] = None
self._user_id = f"engine_{uuid.uuid4().hex}"
self._knowledge_config: Dict[str, Any] = {}
self._tool_schemas: List[Dict[str, Any]] = []
async def connect(self) -> None:
if not self.api_key:
raise ValueError("Dify API key not provided")
if not self.base_url:
raise ValueError("Dify base URL not provided")
headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
}
self._session = aiohttp.ClientSession(headers=headers)
self.state = ServiceState.CONNECTED
logger.info("Dify LLM service connected: base_url={}", self.base_url)
async def disconnect(self) -> None:
if self._session is not None:
await self._session.close()
self._session = None
self.state = ServiceState.DISCONNECTED
logger.info("Dify LLM service disconnected")
def cancel(self) -> None:
self._cancel_event.set()
def set_knowledge_config(self, config: Optional[Dict[str, Any]]) -> None:
# Dify owns retriever orchestration in this provider mode.
self._knowledge_config = dict(config or {})
def set_tool_schemas(self, schemas: Optional[List[Dict[str, Any]]]) -> None:
# Dify owns tool/workflow orchestration in this provider mode.
self._tool_schemas = list(schemas or [])
async def get_initial_greeting(self) -> Optional[str]:
if self._session is None:
return None
url = f"{self.base_url}/parameters"
async with self._session.get(url, params={"user": self._user_id}) as response:
await self._raise_for_status(response, "Dify parameters request failed")
payload = await response.json()
opening_statement = str(payload.get("opening_statement") or "").strip()
return opening_statement or None
async def generate(
self,
messages: List[LLMMessage],
temperature: float = 0.7,
max_tokens: Optional[int] = None,
) -> str:
parts: List[str] = []
async for event in self.generate_stream(messages, temperature=temperature, max_tokens=max_tokens):
if event.type == "text_delta" and event.text:
parts.append(event.text)
return "".join(parts)
async def generate_stream(
self,
messages: List[LLMMessage],
temperature: float = 0.7,
max_tokens: Optional[int] = None,
) -> AsyncIterator[LLMStreamEvent]:
del temperature, max_tokens
if self._session is None:
raise RuntimeError("LLM service not connected")
query = self._extract_query(messages)
if not query:
yield LLMStreamEvent(type="done")
return
if self.system_prompt:
logger.debug("Ignoring local system prompt for Dify-managed assistant config")
payload: Dict[str, Any] = {
"inputs": {},
"query": query,
"user": self._user_id,
"response_mode": "streaming",
}
if self._conversation_id:
payload["conversation_id"] = self._conversation_id
self._cancel_event.clear()
url = f"{self.base_url}/chat-messages"
response = await self._session.post(url, json=payload)
try:
await self._raise_for_status(response, "Dify chat request failed")
async for event in self._iter_sse_events(response):
if self._cancel_event.is_set():
logger.info("Dify stream cancelled")
break
event_name = str(event.get("event") or "").strip().lower()
if event.get("conversation_id"):
self._conversation_id = str(event.get("conversation_id"))
if event_name in {"message", "agent_message"}:
text = self._extract_text_delta(event)
if text:
yield LLMStreamEvent(type="text_delta", text=text)
elif event_name == "error":
raise RuntimeError(str(event.get("message") or event.get("error") or "Dify stream error"))
elif event_name in {"message_end", "agent_message_end"}:
continue
finally:
response.close()
yield LLMStreamEvent(type="done")
@staticmethod
def _extract_query(messages: List[LLMMessage]) -> str:
for message in reversed(messages):
if str(message.role or "").strip().lower() == "user":
return str(message.content or "").strip()
for message in reversed(messages):
content = str(message.content or "").strip()
if content:
return content
return ""
@staticmethod
def _extract_text_delta(event: Dict[str, Any]) -> str:
for key in ("answer", "text", "content"):
value = event.get(key)
if value is not None:
text = str(value)
if text:
return text
return ""
async def _raise_for_status(self, response: aiohttp.ClientResponse, context: str) -> None:
if int(response.status) < 400:
return
try:
payload = await response.json()
except Exception:
payload = await response.text()
raise RuntimeError(f"{context}: HTTP {response.status} {payload}")
async def _iter_sse_events(self, response: aiohttp.ClientResponse) -> AsyncIterator[Dict[str, Any]]:
event_name = ""
data_lines: List[str] = []
async for raw_line in response.content:
line = raw_line.decode("utf-8", errors="ignore").rstrip("\r\n")
if not line:
payload = self._decode_sse_payload(event_name, data_lines)
event_name = ""
data_lines = []
if payload is not None:
yield payload
continue
if line.startswith(":"):
continue
if line.startswith("event:"):
event_name = line.split(":", 1)[1].strip()
continue
if line.startswith("data:"):
data_lines.append(line.split(":", 1)[1].lstrip())
payload = self._decode_sse_payload(event_name, data_lines)
if payload is not None:
yield payload
@staticmethod
def _decode_sse_payload(event_name: str, data_lines: List[str]) -> Optional[Dict[str, Any]]:
if not data_lines:
return None
raw = "\n".join(data_lines).strip()
if not raw:
return None
if raw == "[DONE]":
return {"event": "message_end"}
try:
payload = json.loads(raw)
except json.JSONDecodeError:
logger.debug("Skipping non-JSON Dify SSE payload: {}", raw)
return None
if not isinstance(payload, dict):
return None
if event_name and not payload.get("event"):
payload["event"] = event_name
return payload
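For reference, the provider deleted above consumed Dify's SSE framing from POST {base_url}/chat-messages: data: lines accumulate until a blank line ends the event, [DONE] closes the stream, and message/agent_message events carry answer deltas. A minimal TypeScript sketch of the same payload-decoding rules as _decode_sse_payload, shown only to document the wire format:

type SseEvent = Record<string, unknown>;

// Join buffered data: lines, map [DONE] to a synthetic message_end, skip
// non-JSON payloads, and backfill the event name from the event: field.
function decodeSsePayload(eventName: string, dataLines: string[]): SseEvent | null {
  const raw = dataLines.join('\n').trim();
  if (!raw) return null;
  if (raw === '[DONE]') return { event: 'message_end' };
  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return null; // keep-alive or comment payload
  }
  if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) return null;
  const event = parsed as SseEvent;
  if (eventName && !event.event) event.event = eventName;
  return event;
}

decodeSsePayload('', ['{"event":"message","answer":"Hello "}']);
// -> { event: 'message', answer: 'Hello ' }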

View File

@@ -1,166 +0,0 @@
from typing import Any, Dict, List, Optional
import pytest
from providers.common.base import LLMMessage
from providers.llm.dify import DifyLLMService
class _FakeStreamResponse:
def __init__(self, lines: List[bytes], status: int = 200):
self.content = _FakeStreamContent(lines)
self.status = status
self.closed = False
async def json(self) -> Dict[str, Any]:
return {}
async def text(self) -> str:
return ""
def close(self) -> None:
self.closed = True
class _FakeJSONResponse:
def __init__(self, payload: Dict[str, Any], status: int = 200):
self.payload = payload
self.status = status
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
return False
async def json(self) -> Dict[str, Any]:
return dict(self.payload)
async def text(self) -> str:
return ""
class _FakeStreamContent:
def __init__(self, lines: List[bytes]):
self._lines = list(lines)
def __aiter__(self):
return self._iter()
async def _iter(self):
for line in self._lines:
yield line
class _FakeClientSession:
post_responses: List[_FakeStreamResponse] = []
get_payloads: List[Dict[str, Any]] = []
last_post_url: Optional[str] = None
last_post_json: Optional[Dict[str, Any]] = None
last_get_url: Optional[str] = None
last_get_params: Optional[Dict[str, Any]] = None
def __init__(self, headers: Optional[Dict[str, str]] = None):
self.headers = headers or {}
self.closed = False
async def close(self) -> None:
self.closed = True
async def post(self, url: str, json: Dict[str, Any]):
type(self).last_post_url = url
type(self).last_post_json = dict(json)
if not type(self).post_responses:
raise AssertionError("No fake Dify stream response queued")
return type(self).post_responses.pop(0)
def get(self, url: str, params: Dict[str, Any]):
type(self).last_get_url = url
type(self).last_get_params = dict(params)
if not type(self).get_payloads:
raise AssertionError("No fake Dify JSON payload queued")
return _FakeJSONResponse(type(self).get_payloads.pop(0))
@pytest.mark.asyncio
async def test_dify_provider_streams_message_answer_and_tracks_conversation(monkeypatch):
monkeypatch.setattr("providers.llm.dify.aiohttp.ClientSession", _FakeClientSession)
_FakeClientSession.post_responses = [
_FakeStreamResponse(
[
b'data: {"event":"message","conversation_id":"conv-1","answer":"Hello "}\n',
b"\n",
b'data: {"event":"agent_message","conversation_id":"conv-1","answer":"from Dify."}\n',
b"\n",
b'data: {"event":"message_end","conversation_id":"conv-1"}\n',
b"\n",
]
)
]
service = DifyLLMService(api_key="key", base_url="https://dify.example/v1")
await service.connect()
events = [event async for event in service.generate_stream([LLMMessage(role="user", content="Hi there")])]
assert [event.type for event in events] == ["text_delta", "text_delta", "done"]
assert events[0].text == "Hello "
assert events[1].text == "from Dify."
assert service._conversation_id == "conv-1"
assert _FakeClientSession.last_post_url == "https://dify.example/v1/chat-messages"
assert _FakeClientSession.last_post_json == {
"inputs": {},
"query": "Hi there",
"user": service._user_id,
"response_mode": "streaming",
}
@pytest.mark.asyncio
async def test_dify_provider_reuses_conversation_id_on_follow_up(monkeypatch):
monkeypatch.setattr("providers.llm.dify.aiohttp.ClientSession", _FakeClientSession)
_FakeClientSession.post_responses = [
_FakeStreamResponse(
[
b'data: {"event":"message","conversation_id":"conv-2","answer":"First"}\n',
b"\n",
]
),
_FakeStreamResponse(
[
b'data: {"event":"message","conversation_id":"conv-2","answer":"Second"}\n',
b"\n",
]
),
]
service = DifyLLMService(api_key="key", base_url="https://dify.example/v1")
await service.connect()
_ = [event async for event in service.generate_stream([LLMMessage(role="user", content="Turn one")])]
_ = [event async for event in service.generate_stream([LLMMessage(role="user", content="Turn two")])]
assert _FakeClientSession.last_post_json == {
"inputs": {},
"query": "Turn two",
"user": service._user_id,
"response_mode": "streaming",
"conversation_id": "conv-2",
}
@pytest.mark.asyncio
async def test_dify_provider_loads_initial_greeting_from_parameters(monkeypatch):
monkeypatch.setattr("providers.llm.dify.aiohttp.ClientSession", _FakeClientSession)
_FakeClientSession.get_payloads = [
{"opening_statement": "Hello from Dify."},
]
service = DifyLLMService(api_key="key", base_url="https://dify.example/v1")
await service.connect()
greeting = await service.get_initial_greeting()
assert greeting == "Hello from Dify."
assert _FakeClientSession.last_get_url == "https://dify.example/v1/parameters"
assert _FakeClientSession.last_get_params == {"user": service._user_id}

View File

@@ -1,32 +0,0 @@
from providers.factory.default import DefaultRealtimeServiceFactory
from providers.llm.dify import DifyLLMService
from providers.llm.openai import OpenAILLMService
from runtime.ports import LLMServiceSpec
def test_create_llm_service_dify_returns_dify_provider():
factory = DefaultRealtimeServiceFactory()
service = factory.create_llm_service(
LLMServiceSpec(
provider="dify",
model="dify",
api_key="test-key",
base_url="https://dify.example/v1",
)
)
assert isinstance(service, DifyLLMService)
def test_create_llm_service_openai_returns_openai_provider():
factory = DefaultRealtimeServiceFactory()
service = factory.create_llm_service(
LLMServiceSpec(
provider="openai",
model="gpt-4o-mini",
api_key="test-key",
base_url="https://api.openai.com/v1",
)
)
assert isinstance(service, OpenAILLMService)

View File

@@ -1,67 +0,0 @@
import React from 'react';
import { cn } from '@/lib/utils';
import { Badge } from '@/components/UI';
import type { DebugTranscriptTextRow } from './types';
const roleLabelMap: Record<DebugTranscriptTextRow['role'], string> = {
user: 'Me',
assistant: 'AI',
notice: 'Debug',
};
const MessageText: React.FC<{
row: DebugTranscriptTextRow;
}> = ({ row }) => {
if (row.role === 'notice') {
return (
<div className="flex justify-start">
<div className="max-w-full rounded-md border border-white/10 bg-black/25 px-3 py-2 text-[11px] text-muted-foreground">
<div className="mb-1 flex items-center gap-2">
<span className="uppercase tracking-[0.14em] opacity-70">{roleLabelMap[row.role]}</span>
</div>
<div className="whitespace-pre-wrap break-words">{row.text}</div>
</div>
</div>
);
}
const isUser = row.role === 'user';
return (
<div className={cn('flex', isUser ? 'justify-end' : 'justify-start')}>
<div
className={cn(
'max-w-[85%] rounded-lg px-3 py-2 text-sm',
isUser
? 'bg-primary text-primary-foreground'
: 'bg-card border border-white/10 shadow-sm text-foreground'
)}
>
<div className="mb-1 flex flex-wrap items-center gap-1.5">
<span className="text-[10px] uppercase tracking-wider opacity-70">
{roleLabelMap[row.role]}
</span>
{row.role === 'assistant' &&
typeof row.ttfbMs === 'number' &&
Number.isFinite(row.ttfbMs) && (
<Badge
variant="outline"
className="border-cyan-300/40 bg-cyan-500/10 px-1.5 py-0.5 text-[10px] text-cyan-200"
>
TTFB {Math.round(row.ttfbMs)}ms
</Badge>
)}
{row.role === 'assistant' && row.isStreaming && (
<span className="inline-flex h-2 w-2 animate-pulse rounded-full bg-primary/80" />
)}
</div>
<div className="whitespace-pre-wrap break-words">{row.text}</div>
</div>
</div>
);
};
export default React.memo(MessageText);

View File

@@ -1,162 +0,0 @@
import React, { useEffect, useMemo, useState } from 'react';
import { ChevronDown, ChevronRight, Wrench } from 'lucide-react';
import { Badge, Button } from '@/components/UI';
import { cn } from '@/lib/utils';
import type { DebugTranscriptToolRow } from './types';
const shouldAutoExpand = (status: DebugTranscriptToolRow['status']) =>
status === 'pending' || status === 'error' || status === 'timeout';
const formatStructuredValue = (value: unknown) => {
if (value === undefined || value === null) return '';
if (typeof value === 'string') {
const trimmed = value.trim();
if (!trimmed) return '';
try {
return JSON.stringify(JSON.parse(trimmed), null, 2);
} catch {
return value;
}
}
if (typeof value === 'number' || typeof value === 'boolean') {
return String(value);
}
try {
return JSON.stringify(value, null, 2);
} catch {
return String(value);
}
};
const getStatusBadge = (status: DebugTranscriptToolRow['status']) => {
if (status === 'success') {
return { label: 'Success', variant: 'success' as const, className: '' };
}
if (status === 'pending') {
return { label: 'Pending', variant: 'warning' as const, className: '' };
}
if (status === 'timeout') {
return {
label: 'Timeout',
variant: 'outline' as const,
className: 'border-orange-400/40 bg-orange-500/10 text-orange-200',
};
}
return {
label: 'Error',
variant: 'outline' as const,
className: 'border-rose-400/40 bg-rose-500/10 text-rose-200',
};
};
const Section: React.FC<{
label: string;
value: unknown;
defaultOpen?: boolean;
}> = ({ label, value, defaultOpen = true }) => {
const formattedValue = useMemo(() => formatStructuredValue(value), [value]);
if (!formattedValue) return null;
return (
<details
open={defaultOpen}
className="rounded-md border border-white/10 bg-black/20"
>
<summary className="cursor-pointer list-none px-3 py-2 text-xs font-medium text-muted-foreground">
{label}
</summary>
<pre className="overflow-x-auto whitespace-pre-wrap break-all border-t border-white/10 px-3 py-3 text-[11px] leading-5 text-foreground/90">
{formattedValue}
</pre>
</details>
);
};
const MessageTool: React.FC<{
row: DebugTranscriptToolRow;
nested?: boolean;
}> = ({ row, nested = false }) => {
const [isExpanded, setIsExpanded] = useState(() => shouldAutoExpand(row.status));
useEffect(() => {
setIsExpanded(shouldAutoExpand(row.status));
}, [row.status]);
const statusBadge = getStatusBadge(row.status);
return (
<div className={cn(nested ? 'w-full' : 'flex justify-start')}>
<div
className={cn(
'w-full max-w-full border border-amber-400/30 bg-amber-500/10 p-3 text-amber-50',
nested ? 'rounded-md' : 'rounded-lg'
)}
>
<div className="flex items-start justify-between gap-3">
<div className="min-w-0 flex-1">
<div className="flex flex-wrap items-center gap-2">
<div className="flex items-center gap-2">
<div className="flex h-7 w-7 items-center justify-center rounded-md border border-amber-300/30 bg-black/20 text-amber-200">
<Wrench className="h-4 w-4" />
</div>
<div className="min-w-0">
<div className="truncate text-sm font-medium text-foreground">
{row.toolDisplayName || row.toolName}
</div>
<div className="truncate text-[11px] text-amber-100/70">
{row.toolName}
</div>
</div>
</div>
<Badge variant={statusBadge.variant} className={statusBadge.className}>
{statusBadge.label}
</Badge>
<Badge variant="outline" className="border-white/15 bg-black/10 text-[10px] uppercase">
{row.executor}
</Badge>
{row.source && (
<Badge variant="outline" className="border-white/15 bg-black/10 text-[10px] uppercase">
{row.source}
</Badge>
)}
</div>
<div className="mt-2 text-[11px] text-amber-100/70">
tool_call_id: <span className="font-mono">{row.toolCallId}</span>
</div>
</div>
<Button
type="button"
variant="ghost"
size="sm"
className="h-8 px-2 text-amber-100 hover:bg-white/10 hover:text-foreground"
onClick={() => setIsExpanded((value) => !value)}
aria-expanded={isExpanded}
>
{isExpanded ? <ChevronDown className="h-4 w-4" /> : <ChevronRight className="h-4 w-4" />}
</Button>
</div>
<div
className={cn(
'grid transition-all',
isExpanded ? 'mt-3 grid-rows-[1fr] opacity-100' : 'grid-rows-[0fr] opacity-0'
)}
>
<div className="min-h-0 overflow-hidden">
<div className="space-y-2 pt-1">
<Section label="Arguments" value={row.args} defaultOpen={row.status === 'pending'} />
<Section label="Result" value={row.result} />
<Section label="Error" value={row.error} defaultOpen />
<Section label="Raw call" value={row.rawCall} defaultOpen={false} />
<Section label="Raw result" value={row.rawResult} defaultOpen={false} />
</div>
</div>
</div>
</div>
</div>
);
};
export default React.memo(MessageTool);
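formatStructuredValue above is the normalization step for everything this panel prints: JSON-looking strings are reparsed and pretty-printed, scalars are stringified, and anything unserializable falls back to String(value). A few illustrative calls (assuming the helper were exported for testing; it is module-private here):

formatStructuredValue('{"city":"Berlin"}'); // reparsed -> pretty-printed JSON
formatStructuredValue('plain text');        // not JSON -> returned unchanged
formatStructuredValue({ ok: true });        // object -> JSON.stringify(value, null, 2)
formatStructuredValue(undefined);           // '' -> the Section above renders nothing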

View File

@@ -1,53 +0,0 @@
import React from 'react';
import { Badge } from '@/components/UI';
import MessageTool from './MessageTool';
import type { DebugTranscriptTextRow, DebugTranscriptToolRow } from './types';
const TranscriptAssistantBlock: React.FC<{
message?: DebugTranscriptTextRow;
tools: DebugTranscriptToolRow[];
}> = ({ message, tools }) => {
const isStreaming = Boolean(message?.isStreaming) || tools.some((tool) => tool.status === 'pending');
return (
<div className="flex justify-start">
<div className="w-full max-w-[85%] rounded-lg border border-white/10 bg-card px-3 py-2 text-sm text-foreground shadow-sm">
<div className="mb-1 flex flex-wrap items-center gap-1.5">
<span className="text-[10px] uppercase tracking-wider opacity-70">AI</span>
{typeof message?.ttfbMs === 'number' && Number.isFinite(message.ttfbMs) && (
<Badge
variant="outline"
className="border-cyan-300/40 bg-cyan-500/10 px-1.5 py-0.5 text-[10px] text-cyan-200"
>
TTFB {Math.round(message.ttfbMs)}ms
</Badge>
)}
{tools.length > 0 && (
<Badge variant="outline" className="border-white/15 bg-black/10 px-1.5 py-0.5 text-[10px]">
{tools.length} tool{tools.length > 1 ? 's' : ''}
</Badge>
)}
{isStreaming && <span className="inline-flex h-2 w-2 animate-pulse rounded-full bg-primary/80" />}
</div>
{message?.text ? (
<div className="whitespace-pre-wrap break-words">{message.text}</div>
) : null}
{tools.length > 0 && (
<div className={message?.text ? 'mt-3 border-t border-white/10 pt-3' : 'mt-1'}>
<div className="space-y-2">
{tools.map((tool) => (
<MessageTool key={tool.id} row={tool} nested />
))}
</div>
</div>
)}
</div>
</div>
);
};
export default React.memo(TranscriptAssistantBlock);

View File

@@ -1,18 +0,0 @@
import React from 'react';
import MessageText from './MessageText';
import MessageTool from './MessageTool';
import type { DebugTranscriptRow } from './types';
const TranscriptItem: React.FC<{
row: DebugTranscriptRow;
}> = ({ row }) => {
if (row.kind === 'tool') {
return <MessageTool row={row} />;
}
return <MessageText row={row} />;
};
export default React.memo(TranscriptItem);

View File

@@ -1,123 +0,0 @@
import React, { useMemo } from 'react';
import { MessageSquare } from 'lucide-react';
import { cn } from '@/lib/utils';
import TranscriptAssistantBlock from './TranscriptAssistantBlock';
import TranscriptItem from './TranscriptItem';
import type { DebugTranscriptRow, DebugTranscriptTextRow, DebugTranscriptToolRow } from './types';
type AssistantRenderBlock = {
kind: 'assistant-block';
id: string;
message?: DebugTranscriptTextRow;
tools: DebugTranscriptToolRow[];
};
type TranscriptRenderItem =
| { kind: 'row'; id: string; row: DebugTranscriptRow }
| AssistantRenderBlock;
const getCorrelationKey = (row: Pick<DebugTranscriptRow, 'turnId' | 'utteranceId' | 'responseId'>) => {
if (row.responseId) return `response:${row.responseId}`;
if (row.turnId && row.utteranceId) return `turn:${row.turnId}:utterance:${row.utteranceId}`;
if (row.turnId) return `turn:${row.turnId}`;
if (row.utteranceId) return `utterance:${row.utteranceId}`;
return '';
};
const buildRenderItems = (messages: DebugTranscriptRow[]): TranscriptRenderItem[] => {
const items: TranscriptRenderItem[] = [];
const assistantBlocks = new Map<string, AssistantRenderBlock>();
messages.forEach((row) => {
if (row.kind === 'text' && row.role === 'assistant') {
const correlationKey = getCorrelationKey(row);
if (!correlationKey) {
items.push({ kind: 'row', id: row.id, row });
return;
}
const existingBlock = assistantBlocks.get(correlationKey);
if (existingBlock) {
existingBlock.message = row;
return;
}
const block: AssistantRenderBlock = {
kind: 'assistant-block',
id: `assistant-block:${correlationKey}`,
message: row,
tools: [],
};
assistantBlocks.set(correlationKey, block);
items.push(block);
return;
}
if (row.kind === 'tool') {
const correlationKey = getCorrelationKey(row);
if (!correlationKey) {
items.push({ kind: 'row', id: row.id, row });
return;
}
const existingBlock = assistantBlocks.get(correlationKey);
if (existingBlock) {
existingBlock.tools.push(row);
return;
}
const block: AssistantRenderBlock = {
kind: 'assistant-block',
id: `assistant-block:${correlationKey}`,
tools: [row],
};
assistantBlocks.set(correlationKey, block);
items.push(block);
return;
}
items.push({ kind: 'row', id: row.id, row });
});
return items;
};
const TranscriptList: React.FC<{
scrollRef: React.RefObject<HTMLDivElement | null>;
messages: DebugTranscriptRow[];
isLoading: boolean;
className?: string;
}> = ({ scrollRef, messages, isLoading, className = '' }) => {
const renderItems = useMemo(() => buildRenderItems(messages), [messages]);
return (
<div
ref={scrollRef}
className={cn(
'flex-1 overflow-y-auto overflow-x-hidden rounded-md border border-white/5 bg-black/20 p-2 min-h-0 custom-scrollbar',
className
)}
>
{messages.length === 0 && !isLoading ? (
<div className="flex h-full flex-col items-center justify-center space-y-3 text-muted-foreground/60">
<MessageSquare className="h-8 w-8 opacity-20" />
<p className="text-xs"></p>
</div>
) : (
<div className="space-y-4 pb-4">
{renderItems.map((item) =>
item.kind === 'assistant-block' ? (
<TranscriptAssistantBlock key={item.id} message={item.message} tools={item.tools} />
) : (
<TranscriptItem key={item.id} row={item.row} />
)
)}
</div>
)}
</div>
);
};
export default React.memo(TranscriptList);
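buildRenderItems above folds assistant text rows and tool rows that share a correlation id into one assistant-block, so a response and its tool calls render as a single bubble. A small sketch with hypothetical rows (the helper is module-private, so this documents behavior rather than a public API):

const rows: DebugTranscriptRow[] = [
  { kind: 'text', id: 't1', role: 'assistant', text: 'Checking...', responseId: 'r1' },
  { kind: 'tool', id: 'c1', toolCallId: 'c1', toolName: 'get_weather',
    toolDisplayName: 'Weather', executor: 'server', status: 'pending', responseId: 'r1' },
];
// getCorrelationKey maps both rows to 'response:r1', so buildRenderItems(rows)
// yields a single item:
// { kind: 'assistant-block', id: 'assistant-block:response:r1',
//   message: rows[0], tools: [rows[1]] }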

View File

@@ -1,637 +0,0 @@
import type {
DebugTranscriptRow,
DebugTranscriptTextRole,
DebugTranscriptTextRow,
DebugTranscriptToolRow,
DebugTranscriptToolStatus,
} from './types';
let rowCounter = 0;
const createRowId = (prefix: string) => `${prefix}_${Date.now()}_${++rowCounter}`;
const isTextRow = (
row: DebugTranscriptRow | undefined | null
): row is DebugTranscriptTextRow => row?.kind === 'text';
const isToolRow = (
row: DebugTranscriptRow | undefined | null
): row is DebugTranscriptToolRow => row?.kind === 'tool';
const findRowIndexById = (rows: DebugTranscriptRow[], rowId?: string | null) =>
rowId ? rows.findIndex((row) => row.id === rowId) : -1;
const findAssistantRowIndexByResponseId = (rows: DebugTranscriptRow[], responseId?: string) => {
if (!responseId) return -1;
return rows.findIndex(
(row) => isTextRow(row) && row.role === 'assistant' && row.responseId === responseId
);
};
const findLastAssistantCandidateIndex = (
rows: DebugTranscriptRow[],
responseId?: string
) => {
for (let i = rows.length - 1; i >= 0; i -= 1) {
const row = rows[i];
if (isTextRow(row) && row.role === 'assistant') {
if (responseId && row.responseId && row.responseId !== responseId) {
break;
}
return i;
}
if (isTextRow(row) && row.role === 'user') {
break;
}
}
return -1;
};
const createTextRow = ({
role,
text,
turnId,
utteranceId,
responseId,
ttfbMs,
isStreaming,
}: {
role: DebugTranscriptTextRole;
text: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
ttfbMs?: number;
isStreaming?: boolean;
}): DebugTranscriptTextRow => ({
kind: 'text',
id: createRowId(role),
role,
text,
...(turnId ? { turnId } : {}),
...(utteranceId ? { utteranceId } : {}),
...(responseId ? { responseId } : {}),
...(typeof ttfbMs === 'number' ? { ttfbMs } : {}),
...(typeof isStreaming === 'boolean' ? { isStreaming } : {}),
});
const createToolRow = ({
toolCallId,
toolName,
toolDisplayName,
executor,
turnId,
utteranceId,
responseId,
source,
status,
args,
result,
error,
rawCall,
rawResult,
}: {
toolCallId: string;
toolName: string;
toolDisplayName: string;
executor: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
source?: string;
status: DebugTranscriptToolStatus;
args?: unknown;
result?: unknown;
error?: unknown;
rawCall?: unknown;
rawResult?: unknown;
}): DebugTranscriptToolRow => ({
kind: 'tool',
id: toolCallId || createRowId('tool'),
toolCallId,
toolName,
toolDisplayName,
executor,
...(turnId ? { turnId } : {}),
...(utteranceId ? { utteranceId } : {}),
...(responseId ? { responseId } : {}),
...(source ? { source } : {}),
status,
...(args !== undefined ? { args } : {}),
...(result !== undefined ? { result } : {}),
...(error !== undefined ? { error } : {}),
...(rawCall !== undefined ? { rawCall } : {}),
...(rawResult !== undefined ? { rawResult } : {}),
});
export const resetTranscriptRows = (): DebugTranscriptRow[] => [];
export const appendTextRow = (
rows: DebugTranscriptRow[],
{
role,
text,
turnId,
utteranceId,
responseId,
ttfbMs,
isStreaming,
}: {
role: DebugTranscriptTextRole;
text: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
ttfbMs?: number;
isStreaming?: boolean;
}
): DebugTranscriptRow[] => [
...rows,
createTextRow({ role, text, turnId, utteranceId, responseId, ttfbMs, isStreaming }),
];
export const appendNoticeRow = (rows: DebugTranscriptRow[], text: string) =>
appendTextRow(rows, { role: 'notice', text, isStreaming: false });
export const updateUserDraftRow = (
rows: DebugTranscriptRow[],
{
draftRowId,
text,
turnId,
utteranceId,
}: {
draftRowId?: string | null;
text: string;
turnId?: string;
utteranceId?: string;
}
): { rows: DebugTranscriptRow[]; draftRowId: string } => {
const rowIndex = findRowIndexById(rows, draftRowId);
if (rowIndex !== -1) {
const row = rows[rowIndex];
if (isTextRow(row) && row.role === 'user') {
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
text,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
isStreaming: true,
};
return { rows: nextRows, draftRowId: row.id };
}
}
const nextRow = createTextRow({
role: 'user',
text,
turnId,
utteranceId,
isStreaming: true,
});
return {
rows: [...rows, nextRow],
draftRowId: nextRow.id,
};
};
export const finalizeUserDraftRow = (
rows: DebugTranscriptRow[],
{
draftRowId,
text,
turnId,
utteranceId,
}: {
draftRowId?: string | null;
text: string;
turnId?: string;
utteranceId?: string;
}
): { rows: DebugTranscriptRow[]; draftRowId: null } => {
const rowIndex = findRowIndexById(rows, draftRowId);
if (rowIndex !== -1) {
const row = rows[rowIndex];
if (isTextRow(row) && row.role === 'user') {
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
text: text || row.text,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
isStreaming: false,
};
return { rows: nextRows, draftRowId: null };
}
}
if (!text) {
return { rows, draftRowId: null };
}
return {
rows: appendTextRow(rows, {
role: 'user',
text,
turnId,
utteranceId,
isStreaming: false,
}),
draftRowId: null,
};
};
export const updateAssistantDeltaRow = (
rows: DebugTranscriptRow[],
{
draftRowId,
delta,
turnId,
utteranceId,
responseId,
ttfbMs,
}: {
draftRowId?: string | null;
delta: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
ttfbMs?: number;
}
): { rows: DebugTranscriptRow[]; draftRowId: string } => {
let rowIndex = findRowIndexById(rows, draftRowId);
if (
rowIndex !== -1 &&
(!isTextRow(rows[rowIndex]) || rows[rowIndex].role !== 'assistant')
) {
rowIndex = -1;
}
if (rowIndex === -1) {
rowIndex = findAssistantRowIndexByResponseId(rows, responseId);
}
if (rowIndex === -1) {
rowIndex = findLastAssistantCandidateIndex(rows, responseId);
}
if (rowIndex === -1) {
const lastRow = rows[rows.length - 1];
if (
isTextRow(lastRow) &&
lastRow.role === 'assistant' &&
lastRow.text === delta &&
lastRow.responseId === responseId
) {
return {
rows,
draftRowId: lastRow.id,
};
}
const nextRow = createTextRow({
role: 'assistant',
text: delta,
turnId,
utteranceId,
responseId,
ttfbMs,
isStreaming: true,
});
return {
rows: [...rows, nextRow],
draftRowId: nextRow.id,
};
}
const row = rows[rowIndex];
if (!isTextRow(row) || row.role !== 'assistant') {
return {
rows,
draftRowId: draftRowId || createRowId('assistant'),
};
}
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
text: row.text + delta,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
responseId: row.responseId || responseId,
ttfbMs: typeof row.ttfbMs === 'number' ? row.ttfbMs : ttfbMs,
isStreaming: true,
};
return {
rows: nextRows,
draftRowId: row.id,
};
};
export const finalizeAssistantTextRow = (
rows: DebugTranscriptRow[],
{
draftRowId,
text,
turnId,
utteranceId,
responseId,
ttfbMs,
}: {
draftRowId?: string | null;
text: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
ttfbMs?: number;
}
): { rows: DebugTranscriptRow[]; draftRowId: null } => {
let rowIndex = findRowIndexById(rows, draftRowId);
if (
rowIndex !== -1 &&
(!isTextRow(rows[rowIndex]) || rows[rowIndex].role !== 'assistant')
) {
rowIndex = -1;
}
if (rowIndex === -1) {
rowIndex = findAssistantRowIndexByResponseId(rows, responseId);
}
if (rowIndex === -1) {
rowIndex = findLastAssistantCandidateIndex(rows, responseId);
}
if (rowIndex !== -1) {
const row = rows[rowIndex];
if (isTextRow(row) && row.role === 'assistant') {
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
text: text || row.text,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
responseId: row.responseId || responseId,
ttfbMs: typeof row.ttfbMs === 'number' ? row.ttfbMs : ttfbMs,
isStreaming: false,
};
return {
rows: nextRows,
draftRowId: null,
};
}
}
if (!text) {
return { rows, draftRowId: null };
}
const lastRow = rows[rows.length - 1];
if (
isTextRow(lastRow) &&
lastRow.role === 'assistant' &&
(!responseId || !lastRow.responseId || lastRow.responseId === responseId)
) {
if (lastRow.text === text) {
return { rows, draftRowId: null };
}
if (text.startsWith(lastRow.text) || lastRow.text.startsWith(text)) {
const nextRows = [...rows];
nextRows[nextRows.length - 1] = {
...lastRow,
text,
turnId: lastRow.turnId || turnId,
utteranceId: lastRow.utteranceId || utteranceId,
responseId: lastRow.responseId || responseId,
ttfbMs: typeof lastRow.ttfbMs === 'number' ? lastRow.ttfbMs : ttfbMs,
isStreaming: false,
};
return {
rows: nextRows,
draftRowId: null,
};
}
}
return {
rows: appendTextRow(rows, {
role: 'assistant',
text,
turnId,
utteranceId,
responseId,
ttfbMs,
isStreaming: false,
}),
draftRowId: null,
};
};
export const attachAssistantTtfb = (
rows: DebugTranscriptRow[],
{
responseId,
ttfbMs,
}: {
responseId?: string;
ttfbMs: number;
}
) => {
const rowIndex =
findAssistantRowIndexByResponseId(rows, responseId) !== -1
? findAssistantRowIndexByResponseId(rows, responseId)
: findLastAssistantCandidateIndex(rows, responseId);
if (rowIndex === -1) {
return rows;
}
const row = rows[rowIndex];
if (!isTextRow(row) || row.role !== 'assistant') {
return rows;
}
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
ttfbMs,
};
return nextRows;
};
export const trimInterruptedResponseRows = (
rows: DebugTranscriptRow[],
responseId?: string
) => {
if (!responseId) return rows;
return rows.filter((row) => row.responseId !== responseId);
};
export const upsertToolCallRow = (
rows: DebugTranscriptRow[],
{
toolCallId,
toolName,
toolDisplayName,
executor,
turnId,
utteranceId,
responseId,
args,
rawCall,
}: {
toolCallId: string;
toolName: string;
toolDisplayName: string;
executor: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
args?: unknown;
rawCall?: unknown;
}
) => {
const rowIndex = rows.findIndex(
(row) => isToolRow(row) && row.toolCallId === toolCallId
);
if (rowIndex === -1) {
return [
...rows,
createToolRow({
toolCallId,
toolName,
toolDisplayName,
executor,
turnId,
utteranceId,
responseId,
status: 'pending',
args,
rawCall,
}),
];
}
const row = rows[rowIndex];
if (!isToolRow(row)) {
return rows;
}
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
toolName,
toolDisplayName,
executor,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
responseId: row.responseId || responseId,
status: 'pending',
args: args !== undefined ? args : row.args,
rawCall: rawCall !== undefined ? rawCall : row.rawCall,
};
return nextRows;
};
export const normalizeToolStatus = (
statusCode?: number,
statusMessage?: string
): DebugTranscriptToolStatus => {
if (statusCode === 504 || String(statusMessage || '').toLowerCase().includes('timeout')) {
return 'timeout';
}
if (typeof statusCode === 'number' && statusCode >= 200 && statusCode < 300) {
return 'success';
}
return 'error';
};
export const resolveToolResultRow = (
rows: DebugTranscriptRow[],
{
toolCallId,
toolName,
toolDisplayName,
executor,
turnId,
utteranceId,
responseId,
source,
status,
args,
result,
error,
rawCall,
rawResult,
}: {
toolCallId: string;
toolName: string;
toolDisplayName: string;
executor?: string;
turnId?: string;
utteranceId?: string;
responseId?: string;
source?: string;
status: DebugTranscriptToolStatus;
args?: unknown;
result?: unknown;
error?: unknown;
rawCall?: unknown;
rawResult?: unknown;
}
) => {
const rowIndex = rows.findIndex(
(row) => isToolRow(row) && row.toolCallId === toolCallId
);
if (rowIndex === -1) {
return [
...rows,
createToolRow({
toolCallId,
toolName,
toolDisplayName,
executor: executor || 'server',
turnId,
utteranceId,
responseId,
source,
status,
args,
result,
error,
rawCall,
rawResult,
}),
];
}
const row = rows[rowIndex];
if (!isToolRow(row)) {
return rows;
}
const nextRows = [...rows];
nextRows[rowIndex] = {
...row,
toolName: toolName || row.toolName,
toolDisplayName: toolDisplayName || row.toolDisplayName,
executor: executor || row.executor,
turnId: row.turnId || turnId,
utteranceId: row.utteranceId || utteranceId,
responseId: row.responseId || responseId,
source: source || row.source,
status,
args: args !== undefined ? args : row.args,
result: result !== undefined ? result : row.result,
error,
rawCall: rawCall !== undefined ? rawCall : row.rawCall,
rawResult: rawResult !== undefined ? rawResult : row.rawResult,
};
return nextRows;
};
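Together these helpers form a small reducer over immutable row arrays: every call returns a new array and the caller threads the result back through setState. A typical tool round-trip, sketched with illustrative ids:

let rows = resetTranscriptRows();
rows = appendTextRow(rows, { role: 'user', text: 'What is 2+2?' });
// The call event lands first and renders as a pending tool row...
rows = upsertToolCallRow(rows, {
  toolCallId: 'call_1', toolName: 'calc', toolDisplayName: 'Calculator',
  executor: 'server', responseId: 'resp_1', args: { expr: '2+2' },
});
// ...then the result resolves it in place; 2xx maps to 'success',
// 504 or a 'timeout' message maps to 'timeout', anything else to 'error'.
rows = resolveToolResultRow(rows, {
  toolCallId: 'call_1', toolName: 'calc', toolDisplayName: 'Calculator',
  status: normalizeToolStatus(200, 'ok'), result: { value: 4 },
});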

View File

@@ -1,36 +0,0 @@
export type DebugTranscriptTextRole = 'user' | 'assistant' | 'notice';
export type DebugTranscriptToolStatus = 'pending' | 'success' | 'error' | 'timeout';
export type DebugTranscriptCorrelation = {
turnId?: string;
utteranceId?: string;
responseId?: string;
};
export type DebugTranscriptTextRow = {
kind: 'text';
id: string;
role: DebugTranscriptTextRole;
text: string;
ttfbMs?: number;
isStreaming?: boolean;
} & DebugTranscriptCorrelation;
export type DebugTranscriptToolRow = {
kind: 'tool';
id: string;
toolCallId: string;
toolName: string;
toolDisplayName: string;
executor: string;
source?: string;
status: DebugTranscriptToolStatus;
args?: unknown;
result?: unknown;
error?: unknown;
rawCall?: unknown;
rawResult?: unknown;
} & DebugTranscriptCorrelation;
export type DebugTranscriptRow = DebugTranscriptTextRow | DebugTranscriptToolRow;

View File

@@ -3,22 +3,6 @@ import React, { useState, useEffect, useMemo, useRef } from 'react';
import { createPortal } from 'react-dom';
import { Plus, Search, Play, Square, Copy, Trash2, Mic, MessageSquare, Save, Video, PhoneOff, Camera, ArrowLeftRight, Send, Phone, Rocket, AlertTriangle, PhoneCall, CameraOff, Image, Images, CloudSun, Calendar, TrendingUp, Coins, Wrench, Globe, Terminal, X, ClipboardCheck, Sparkles, Volume2, Timer, ChevronDown, Database, Server, Zap, ExternalLink, Key, BrainCircuit, Ear, Book, Filter } from 'lucide-react';
import { Button, Input, Badge, Drawer, Dialog, Switch } from '../components/UI';
-import TranscriptList from '../components/debug-transcript/TranscriptList';
-import type { DebugTranscriptRow } from '../components/debug-transcript/types';
-import {
-  appendNoticeRow,
-  appendTextRow,
-  attachAssistantTtfb,
-  finalizeAssistantTextRow,
-  finalizeUserDraftRow,
-  normalizeToolStatus,
-  resetTranscriptRows,
-  resolveToolResultRow,
-  trimInterruptedResponseRows,
-  updateAssistantDeltaRow,
-  updateUserDraftRow,
-  upsertToolCallRow,
-} from '../components/debug-transcript/message-utils';
import { ASRModel, Assistant, AssistantOpenerToolCall, KnowledgeBase, LLMModel, TabValue, Tool, Voice } from '../types';
import { createAssistant, deleteAssistant, fetchASRModels, fetchAssistantOpenerAudioPcmBuffer, fetchAssistants, fetchKnowledgeBases, fetchLLMModels, fetchTools, fetchVoices, generateAssistantOpenerAudio, previewVoice, updateAssistant as updateAssistantApi } from '../services/backendApi';
import { useDebugPrefsStore } from '../stores/debugPrefsStore';
@@ -893,13 +877,9 @@ export const AssistantsPage: React.FC = () => {
{selectedAssistant.configMode === 'fastgpt' && (
<div className="space-y-2">
<label className="hidden">
<label className="text-sm font-medium text-white flex items-center">
<Key className="w-4 h-4 mr-2 text-primary" /> ID (APP ID)
<span className="text-sm text-white">?? ID (APP ID)</span>
</label>
<div className="text-sm font-medium text-white flex items-center">
<Key className="w-4 h-4 mr-2 text-primary" /> ID (APP ID)
</div>
<Input
value={selectedAssistant.appId || ''}
onChange={(e) => updateAssistant('appId', e.target.value)}
@@ -2241,6 +2221,13 @@ const extractDynamicTemplateKeys = (text: string): string[] => {
return Array.from(keys);
};
type DebugTranscriptMessage = {
role: 'user' | 'model' | 'tool';
text: string;
responseId?: string;
ttfbMs?: number;
};
type DebugPromptPendingResult = {
toolCallId: string;
toolName: string;
@@ -2412,6 +2399,33 @@ const normalizeFastGPTInteractiveFields = (rawForm: unknown[]): DebugFastGPTInte
return resolved;
};
// Stable transcription log so the scroll container is not recreated on every render (avoids scroll jumping)
const TranscriptionLog: React.FC<{
scrollRef: React.RefObject<HTMLDivElement | null>;
messages: DebugTranscriptMessage[];
isLoading: boolean;
className?: string;
}> = ({ scrollRef, messages, isLoading, className = '' }) => (
<div ref={scrollRef} className={`overflow-y-auto overflow-x-hidden space-y-4 p-2 border border-white/5 rounded-md bg-black/20 min-h-0 custom-scrollbar ${className}`}>
{messages.length === 0 && <div className="text-center text-muted-foreground text-xs py-4"></div>}
{messages.map((m, i) => (
<div key={i} className={`flex ${m.role === 'user' ? 'justify-end' : 'justify-start'}`}>
<div className={`max-w-[85%] rounded-lg px-3 py-2 text-sm ${m.role === 'user' ? 'bg-primary text-primary-foreground' : m.role === 'tool' ? 'bg-amber-500/10 border border-amber-400/30 text-amber-100' : 'bg-card border border-white/10 shadow-sm text-foreground'}`}>
<div className="mb-0.5 flex items-center gap-1.5">
<span className="text-[10px] opacity-70 uppercase tracking-wider">{m.role === 'user' ? 'Me' : m.role === 'tool' ? 'Tool' : 'AI'}</span>
{m.role === 'model' && typeof m.ttfbMs === 'number' && Number.isFinite(m.ttfbMs) && (
<span className="rounded border border-cyan-300/40 bg-cyan-500/10 px-1.5 py-0.5 text-[10px] text-cyan-200">
TTFB {Math.round(m.ttfbMs)}ms
</span>
)}
</div>
{m.text}
</div>
</div>
))}
</div>
);
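The hoisting called out in the comment above matters because a component type defined inside another component's render has a new identity on every render, so React unmounts and remounts its subtree and the scroll position is lost. A minimal sketch of the anti-pattern this avoids:

const Parent = () => {
  // Anti-pattern: Inner is a brand-new component type each render, so React
  // replaces (remounts) the scrollable div instead of updating it in place.
  const Inner = () => <div className="overflow-y-auto">...</div>;
  return <Inner />;
};
// Defining TranscriptionLog at module scope, as above, keeps the element type
// stable across renders, so React reuses the existing DOM node.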
// --- Debug Drawer Component ---
export const DebugDrawer: React.FC<{
isOpen: boolean;
@@ -2469,7 +2483,7 @@ export const DebugDrawer: React.FC<{
};
const [mode, setMode] = useState<'text' | 'voice' | 'video'>('text');
-  const [messages, setMessages] = useState<DebugTranscriptRow[]>([]);
+  const [messages, setMessages] = useState<DebugTranscriptMessage[]>([]);
const [inputText, setInputText] = useState('');
const [isLoading, setIsLoading] = useState(false);
const [callStatus, setCallStatus] = useState<'idle' | 'calling' | 'active'>('idle');
@@ -2520,8 +2534,6 @@ export const DebugDrawer: React.FC<{
const setNsEnabled = useDebugPrefsStore((state) => state.setNsEnabled);
const agcEnabled = useDebugPrefsStore((state) => state.agcEnabled);
const setAgcEnabled = useDebugPrefsStore((state) => state.setAgcEnabled);
const fastgptInteractiveVoiceEnabled = useDebugPrefsStore((state) => state.fastgptInteractiveVoiceEnabled);
const setFastgptInteractiveVoiceEnabled = useDebugPrefsStore((state) => state.setFastgptInteractiveVoiceEnabled);
const clientToolEnabledMap = useDebugPrefsStore((state) => state.clientToolEnabledMap);
const setClientToolEnabled = useDebugPrefsStore((state) => state.setClientToolEnabled);
const hydrateClientToolDefaults = useDebugPrefsStore((state) => state.hydrateClientToolDefaults);
@@ -2567,7 +2579,8 @@ export const DebugDrawer: React.FC<{
const pendingResolveRef = useRef<(() => void) | null>(null);
const pendingRejectRef = useRef<((e: Error) => void) | null>(null);
const submittedMetadataRef = useRef<Record<string, any> | null>(null);
const assistantDraftRowIdRef = useRef<string | null>(null);
const assistantDraftIndexRef = useRef<number | null>(null);
const assistantResponseIndexByIdRef = useRef<Map<string, number>>(new Map());
const pendingTtfbByResponseIdRef = useRef<Map<string, number>>(new Map());
const interruptedResponseIdsRef = useRef<Set<string>>(new Set());
const interruptedDropNoticeKeysRef = useRef<Set<string>>(new Set());
@@ -2591,7 +2604,7 @@ export const DebugDrawer: React.FC<{
const micProcessorRef = useRef<ScriptProcessorNode | null>(null);
const micGainRef = useRef<GainNode | null>(null);
const micFrameBufferRef = useRef<Uint8Array>(new Uint8Array(0));
const userDraftRowIdRef = useRef<string | null>(null);
const userDraftIndexRef = useRef<number | null>(null);
const lastUserFinalRef = useRef<string>('');
const debugVolumePercentRef = useRef<number>(50);
const clientToolEnabledMapRef = useRef<Record<string, boolean>>(clientToolEnabledMap);
@@ -2632,7 +2645,8 @@ export const DebugDrawer: React.FC<{
}, [assistant.tools, tools, clientToolEnabledMap]);
const clearResponseTracking = () => {
assistantDraftRowIdRef.current = null;
assistantDraftIndexRef.current = null;
assistantResponseIndexByIdRef.current.clear();
pendingTtfbByResponseIdRef.current.clear();
interruptedResponseIdsRef.current.clear();
interruptedDropNoticeKeysRef.current.clear();
@@ -2644,18 +2658,6 @@ export const DebugDrawer: React.FC<{
return responseId || undefined;
};
const extractTurnId = (payload: any): string | undefined => {
const turnIdRaw = payload?.data?.turn_id ?? payload?.turn_id ?? payload?.turnId;
const turnId = String(turnIdRaw || '').trim();
return turnId || undefined;
};
const extractUtteranceId = (payload: any): string | undefined => {
const utteranceIdRaw = payload?.data?.utterance_id ?? payload?.utterance_id ?? payload?.utteranceId;
const utteranceId = String(utteranceIdRaw || '').trim();
return utteranceId || undefined;
};
const noteInterruptedDrop = (responseId: string, kind: 'ttfb' | 'delta' | 'final') => {
const key = `${responseId}:${kind}`;
if (interruptedDropNoticeKeysRef.current.has(key)) return;
@@ -2664,9 +2666,13 @@ export const DebugDrawer: React.FC<{
const oldest = interruptedDropNoticeKeysRef.current.values().next().value as string | undefined;
if (oldest) interruptedDropNoticeKeysRef.current.delete(oldest);
}
setMessages((prev) =>
appendNoticeRow(prev, `drop stale ${kind} from interrupted response ${responseId}`)
);
setMessages((prev) => [
...prev,
{
role: 'tool',
text: `drop stale ${kind} from interrupted response ${responseId}`,
},
]);
};
// Initialize
@@ -2674,11 +2680,11 @@ export const DebugDrawer: React.FC<{
if (isOpen) {
if (mode === 'text') {
clearResponseTracking();
setMessages(resetTranscriptRows());
setMessages([]);
setTextSessionStarted(false);
} else {
clearResponseTracking();
setMessages(resetTranscriptRows());
setMessages([]);
setCallStatus('idle');
setAgentState('listening');
}
@@ -2746,12 +2752,6 @@ export const DebugDrawer: React.FC<{
fastgptInteractiveDialogRef.current = fastgptInteractiveDialog;
}, [fastgptInteractiveDialog]);
useEffect(() => {
if (!fastgptInteractiveVoiceEnabled && fastgptInteractiveDialog.open) {
stopPromptVoicePlayback();
}
}, [fastgptInteractiveVoiceEnabled, fastgptInteractiveDialog.open]);
useEffect(() => {
dynamicVariableSeqRef.current = 0;
setDynamicVariables([]);
@@ -2959,18 +2959,17 @@ export const DebugDrawer: React.FC<{
const statusCode = Number(resultPayload?.status?.code || 500);
const statusMessage = String(resultPayload?.status?.message || 'error');
const displayName = toolDisplayName || String(resultPayload?.name || 'unknown_tool');
setMessages((prev) =>
resolveToolResultRow(prev, {
toolCallId: String(resultPayload?.tool_call_id || '').trim(),
toolName: normalizeToolId(resultPayload?.name || 'unknown_tool'),
toolDisplayName: displayName,
source: 'client',
status: normalizeToolStatus(statusCode, statusMessage),
result: resultPayload?.output,
error: statusCode >= 200 && statusCode < 300 ? undefined : resultPayload?.output ?? statusMessage,
rawResult: resultPayload,
})
);
const resultText =
statusCode === 200 && typeof resultPayload?.output?.result === 'number'
? `result ${displayName} = ${resultPayload.output.result}`
: `result ${displayName} status=${statusCode} ${statusMessage}`;
setMessages((prev) => [
...prev,
{
role: 'tool',
text: resultText,
},
]);
};
const stopPromptVoicePlayback = () => {
@@ -3089,7 +3088,7 @@ export const DebugDrawer: React.FC<{
submitLabel: item.payload.submitLabel,
cancelLabel: item.payload.cancelLabel,
});
if (nextVoiceText && fastgptInteractiveVoiceEnabled) {
if (nextVoiceText) {
void playPromptVoice(nextVoiceText);
}
return;
@@ -3164,10 +3163,6 @@ export const DebugDrawer: React.FC<{
const fieldValues = snapshot.fieldValues;
const interactionType = snapshot.interactionType;
stopPromptVoicePlayback();
// Stop only local playback so the resumed FastGPT response can take over
// without cancelling the active server-side turn.
stopPlaybackImmediately();
setAgentState('waiting');
setFastgptInteractiveDialog({
open: false,
interactionType: 'userSelect',
@@ -3397,12 +3392,7 @@ export const DebugDrawer: React.FC<{
setCallStatus('calling');
setTimeout(() => {
setCallStatus('active');
setMessages(
appendTextRow(resetTranscriptRows(), {
role: 'assistant',
text: assistant.opener || 'Hello!',
})
);
setMessages([{ role: 'model', text: assistant.opener || "Hello!" }]);
}, 1500);
return;
}
@@ -3410,7 +3400,7 @@ export const DebugDrawer: React.FC<{
try {
setCallStatus('calling');
clearResponseTracking();
setMessages(resetTranscriptRows());
setMessages([]);
lastUserFinalRef.current = '';
setWsError('');
setDynamicVariablesError('');
@@ -3452,7 +3442,7 @@ export const DebugDrawer: React.FC<{
setCallStatus('idle');
setAgentState('listening');
clearResponseTracking();
setMessages(resetTranscriptRows());
setMessages([]);
setTextPromptDialog({ open: false, message: '', promptType: 'text' });
setChoicePromptDialog({ open: false, question: '', options: [] });
lastUserFinalRef.current = '';
@@ -3462,14 +3452,8 @@ export const DebugDrawer: React.FC<{
const handleSend = async () => {
if (!inputText.trim()) return;
const userMsg = inputText;
assistantDraftRowIdRef.current = null;
setMessages((prev) =>
appendTextRow(prev, {
role: 'user',
text: userMsg,
isStreaming: false,
})
);
assistantDraftIndexRef.current = null;
setMessages(prev => [...prev, { role: 'user', text: userMsg }]);
setInputText('');
setIsLoading(true);
@@ -3489,13 +3473,7 @@ export const DebugDrawer: React.FC<{
wsRef.current?.send(JSON.stringify({ type: 'input.text', text: userMsg }));
} else {
setTimeout(() => {
setMessages((prev) =>
appendTextRow(prev, {
role: 'assistant',
text: `[Mock Response]: Received "${userMsg}"`,
isStreaming: false,
})
);
setMessages(prev => [...prev, { role: 'model', text: `[Mock Response]: Received "${userMsg}"` }]);
setIsLoading(false);
}, 1000);
}
@@ -3507,13 +3485,7 @@ export const DebugDrawer: React.FC<{
return;
}
const errMessage = err?.message || 'Failed to connect to AI service.';
setMessages((prev) =>
appendTextRow(prev, {
role: 'assistant',
text: `Error: ${errMessage}`,
isStreaming: false,
})
);
setMessages(prev => [...prev, { role: 'model', text: `Error: ${errMessage}` }]);
setWsError(errMessage);
setIsLoading(false);
} finally {
@@ -3527,7 +3499,7 @@ export const DebugDrawer: React.FC<{
setDynamicVariablesError('');
// Start every text debug run as a fresh session transcript.
clearResponseTracking();
setMessages(resetTranscriptRows());
setMessages([]);
lastUserFinalRef.current = '';
// Force a fresh WS session so updated assistant runtime config
// (voice/model/provider/speed) is applied on session.start.
@@ -3825,7 +3797,7 @@ export const DebugDrawer: React.FC<{
pendingResolveRef.current = null;
pendingRejectRef.current = null;
clearResponseTracking();
userDraftRowIdRef.current = null;
userDraftIndexRef.current = null;
lastUserFinalRef.current = '';
micFrameBufferRef.current = new Uint8Array(0);
stopPromptVoicePlayback();
@@ -3932,10 +3904,8 @@ export const DebugDrawer: React.FC<{
const oldest = interruptedResponseIdsRef.current.values().next().value as string | undefined;
if (oldest) interruptedResponseIdsRef.current.delete(oldest);
}
pendingTtfbByResponseIdRef.current.delete(interruptedResponseId);
setMessages((prev) => trimInterruptedResponseRows(prev, interruptedResponseId));
}
assistantDraftRowIdRef.current = null;
assistantDraftIndexRef.current = null;
setIsLoading(false);
stopPlaybackImmediately();
setAgentState('waiting');
@@ -3952,16 +3922,29 @@ export const DebugDrawer: React.FC<{
return;
}
if (responseId) {
setMessages((prev) => {
const nextRows = attachAssistantTtfb(prev, { responseId, ttfbMs });
if (nextRows === prev) {
pendingTtfbByResponseIdRef.current.set(responseId, ttfbMs);
}
return nextRows;
});
const indexed = assistantResponseIndexByIdRef.current.get(responseId);
if (typeof indexed === 'number') {
setMessages((prev) => {
if (!prev[indexed] || prev[indexed].role !== 'model') return prev;
const next = [...prev];
next[indexed] = { ...next[indexed], ttfbMs };
return next;
});
} else {
pendingTtfbByResponseIdRef.current.set(responseId, ttfbMs);
}
return;
}
setMessages((prev) => attachAssistantTtfb(prev, { ttfbMs }));
setMessages((prev) => {
for (let i = prev.length - 1; i >= 0; i -= 1) {
if (prev[i]?.role === 'model') {
const next = [...prev];
next[i] = { ...next[i], ttfbMs };
return next;
}
}
return prev;
});
return;
}
@@ -3972,35 +3955,24 @@ export const DebugDrawer: React.FC<{
const toolDisplayName = String(payload?.tool_display_name || toolCall?.displayName || toolName);
const executor = String(toolCall?.executor || 'server').toLowerCase();
const rawArgs = String(toolCall?.function?.arguments || '');
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
const responseId = extractResponseId(payload);
let parsedArgsValue: unknown = rawArgs || undefined;
if (rawArgs) {
try {
parsedArgsValue = JSON.parse(rawArgs);
} catch {
parsedArgsValue = rawArgs;
}
}
setMessages((prev) =>
upsertToolCallRow(prev, {
toolCallId,
toolName,
toolDisplayName,
executor,
turnId,
utteranceId,
responseId,
args: parsedArgsValue,
rawCall: payload,
})
);
const argText = rawArgs.length > 160 ? `${rawArgs.slice(0, 160)}...` : rawArgs;
setMessages((prev) => [
...prev,
{
role: 'tool',
text: `call ${toolDisplayName} executor=${executor}${argText ? ` args=${argText}` : ''}`,
},
]);
if (executor === 'client' && toolCallId && ws.readyState === WebSocket.OPEN) {
-      const parsedArgs =
-        parsedArgsValue && typeof parsedArgsValue === 'object' && !Array.isArray(parsedArgsValue)
-          ? (parsedArgsValue as Record<string, any>)
-          : {};
+      let parsedArgs: Record<string, any> = {};
+      if (rawArgs) {
+        try {
+          const candidate = JSON.parse(rawArgs);
+          parsedArgs = candidate && typeof candidate === 'object' ? candidate : {};
+        } catch {
+          parsedArgs = {};
+        }
+      }
const waitForResponseRaw = Boolean(
payload?.wait_for_response ?? toolCall?.wait_for_response ?? toolCall?.waitForResponse ?? false
);
@@ -4268,29 +4240,17 @@ export const DebugDrawer: React.FC<{
if (type === 'assistant.tool_result') {
const result = payload?.result || {};
const toolCallId = String(payload?.tool_call_id || result?.tool_call_id || '').trim();
const toolName = normalizeToolId(result?.name || 'unknown_tool');
-      const toolDisplayName = String(payload?.tool_display_name || result?.tool_display_name || toolName);
+      const toolDisplayName = String(payload?.tool_display_name || toolName);
const statusCode = Number(result?.status?.code || 500);
const statusMessage = String(result?.status?.message || 'error');
const source = String(payload?.source || 'server');
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
setMessages((prev) =>
resolveToolResultRow(prev, {
toolCallId,
toolName,
toolDisplayName,
turnId,
utteranceId,
responseId: extractResponseId(payload),
source,
status: normalizeToolStatus(statusCode, statusMessage),
result: result?.output,
error: statusCode >= 200 && statusCode < 300 ? undefined : result?.output ?? statusMessage,
rawResult: payload,
})
);
const output = result?.output;
const resultText =
statusCode === 200
? `result ${toolDisplayName} source=${source} ${JSON.stringify(output)}`
: `result ${toolDisplayName} source=${source} status=${statusCode} ${statusMessage}`;
setMessages((prev) => [...prev, { role: 'tool', text: resultText }]);
return;
}
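The tool_result branch treats the status as HTTP-like: success codes pass the output through, anything else surfaces the output (or the status message) as the error. A sketch of that mapping, assuming the 2xx convention used in the error check above:

function summarizeToolResult(code: number, message: string, output: unknown): string {
  // 2xx is success; otherwise report status and message alongside the source payload.
  return code >= 200 && code < 300
    ? `ok ${JSON.stringify(output)}`
    : `error status=${code} ${message}`;
}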
@@ -4332,17 +4292,17 @@ export const DebugDrawer: React.FC<{
if (type === 'transcript.delta') {
const delta = String(payload.text || '');
if (!delta) return;
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
setMessages((prev) => {
const nextState = updateUserDraftRow(prev, {
draftRowId: userDraftRowIdRef.current,
text: delta,
turnId,
utteranceId,
});
userDraftRowIdRef.current = nextState.draftRowId;
return nextState.rows;
const idx = userDraftIndexRef.current;
if (idx === null || !prev[idx] || prev[idx].role !== 'user') {
const next = [...prev, { role: 'user' as const, text: delta }];
userDraftIndexRef.current = next.length - 1;
return next;
}
const next = [...prev];
// ASR interim is typically the latest partial text, not a true text delta.
next[idx] = { ...next[idx], text: delta };
return next;
});
return;
}
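As the inline comment notes, ASR interim results are full partial transcripts, so the user draft row's text is replaced on each delta rather than concatenated (assistant deltas, by contrast, accumulate). A minimal sketch of that replace-not-append update, with illustrative types:

type UserRow = { role: 'user' | 'model' | 'tool'; text: string };

function applyUserInterim(
  rows: UserRow[],
  draftIdx: number | null,
  text: string
): { rows: UserRow[]; draftIdx: number } {
  if (draftIdx === null || rows[draftIdx]?.role !== 'user') {
    // No live draft row: start one and remember its index.
    const next = [...rows, { role: 'user' as const, text }];
    return { rows: next, draftIdx: next.length - 1 };
  }
  const next = [...rows];
  next[draftIdx] = { ...next[draftIdx], text }; // replace the partial, don't append
  return { rows: next, draftIdx };
}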
@@ -4350,25 +4310,31 @@ export const DebugDrawer: React.FC<{
if (type === 'transcript.final') {
const finalText = String(payload.text || '');
if (!finalText) {
userDraftRowIdRef.current = null;
userDraftIndexRef.current = null;
return;
}
if (lastUserFinalRef.current === finalText) {
userDraftRowIdRef.current = null;
userDraftIndexRef.current = null;
return;
}
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
setMessages((prev) => {
const nextState = finalizeUserDraftRow(prev, {
draftRowId: userDraftRowIdRef.current,
text: finalText,
turnId,
utteranceId,
});
userDraftRowIdRef.current = nextState.draftRowId;
const idx = userDraftIndexRef.current;
userDraftIndexRef.current = null;
if (idx !== null && prev[idx] && prev[idx].role === 'user') {
const next = [...prev];
next[idx] = { ...next[idx], text: finalText || next[idx].text };
lastUserFinalRef.current = finalText;
return next;
}
const last = prev[prev.length - 1];
if (last?.role === 'user') {
const next = [...prev];
next[next.length - 1] = { ...last, text: finalText };
lastUserFinalRef.current = finalText;
return next;
}
lastUserFinalRef.current = finalText;
return nextState.rows;
return [...prev, { role: 'user', text: finalText }];
});
return;
}
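Final transcripts can be delivered more than once, so the handler remembers the last finalized string and drops exact repeats before touching the rows. A sketch of that guard, mirroring the lastUserFinalRef check above:

let lastUserFinal: string | null = null;

function acceptFinalTranscript(text: string): boolean {
  // Drop empty finals and exact duplicates of the previous final.
  if (!text || text === lastUserFinal) return false;
  lastUserFinal = text;
  return true;
}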
@@ -4376,56 +4342,143 @@ export const DebugDrawer: React.FC<{
if (type === 'assistant.response.delta') {
const delta = String(payload.text || '');
if (!delta) return;
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
const responseId = extractResponseId(payload);
if (responseId && interruptedResponseIdsRef.current.has(responseId)) {
noteInterruptedDrop(responseId, 'delta');
return;
}
setMessages((prev) => {
const pendingTtfb = responseId ? pendingTtfbByResponseIdRef.current.get(responseId) : undefined;
const nextState = updateAssistantDeltaRow(prev, {
draftRowId: assistantDraftRowIdRef.current,
delta,
turnId,
utteranceId,
responseId,
ttfbMs: pendingTtfb,
});
assistantDraftRowIdRef.current = nextState.draftRowId;
if (responseId && typeof pendingTtfb === 'number') {
let idx = assistantDraftIndexRef.current;
if (idx === null || !prev[idx] || prev[idx].role !== 'model') {
// Tool records can be appended between assistant chunks; recover the
// latest model row instead of creating a duplicate assistant row.
for (let i = prev.length - 1; i >= 0; i -= 1) {
if (prev[i]?.role === 'model') {
if (
responseId
&& prev[i].responseId
&& prev[i].responseId !== responseId
) {
break;
}
idx = i;
assistantDraftIndexRef.current = i;
break;
}
if (prev[i]?.role === 'user') break;
}
}
if (idx === null || !prev[idx] || prev[idx].role !== 'model') {
const last = prev[prev.length - 1];
if (last?.role === 'model' && last.text === delta) {
return prev;
}
const nextMessage: DebugTranscriptMessage = { role: 'model' as const, text: delta };
if (responseId) {
nextMessage.responseId = responseId;
if (pendingTtfbByResponseIdRef.current.has(responseId)) {
nextMessage.ttfbMs = pendingTtfbByResponseIdRef.current.get(responseId);
pendingTtfbByResponseIdRef.current.delete(responseId);
}
}
const next = [...prev, nextMessage];
assistantDraftIndexRef.current = next.length - 1;
if (responseId) {
assistantResponseIndexByIdRef.current.set(responseId, next.length - 1);
}
return next;
}
const next = [...prev];
const nextMessage = { ...next[idx], text: next[idx].text + delta };
if (responseId && !nextMessage.responseId) {
nextMessage.responseId = responseId;
}
if (
responseId
&& typeof nextMessage.ttfbMs !== 'number'
&& pendingTtfbByResponseIdRef.current.has(responseId)
) {
nextMessage.ttfbMs = pendingTtfbByResponseIdRef.current.get(responseId);
pendingTtfbByResponseIdRef.current.delete(responseId);
}
return nextState.rows;
next[idx] = nextMessage;
if (responseId) {
assistantResponseIndexByIdRef.current.set(responseId, idx);
}
return next;
});
return;
}
if (type === 'assistant.response.final') {
const finalText = String(payload.text || '');
const turnId = extractTurnId(payload);
const utteranceId = extractUtteranceId(payload);
const responseId = extractResponseId(payload);
if (responseId && interruptedResponseIdsRef.current.has(responseId)) {
noteInterruptedDrop(responseId, 'final');
return;
}
setMessages((prev) => {
const pendingTtfb = responseId ? pendingTtfbByResponseIdRef.current.get(responseId) : undefined;
const nextState = finalizeAssistantTextRow(prev, {
draftRowId: assistantDraftRowIdRef.current,
text: finalText,
turnId,
utteranceId,
responseId,
ttfbMs: pendingTtfb,
});
assistantDraftRowIdRef.current = nextState.draftRowId;
if (responseId && typeof pendingTtfb === 'number') {
pendingTtfbByResponseIdRef.current.delete(responseId);
let idx = assistantDraftIndexRef.current;
assistantDraftIndexRef.current = null;
if (idx === null || !prev[idx] || prev[idx].role !== 'model') {
for (let i = prev.length - 1; i >= 0; i -= 1) {
if (prev[i]?.role === 'model') {
if (
responseId
&& prev[i].responseId
&& prev[i].responseId !== responseId
) {
break;
}
idx = i;
break;
}
if (prev[i]?.role === 'user') break;
}
}
return nextState.rows;
if (idx !== null && prev[idx] && prev[idx].role === 'model') {
const next = [...prev];
const nextMessage = { ...next[idx], text: finalText || next[idx].text };
if (responseId && !nextMessage.responseId) {
nextMessage.responseId = responseId;
}
if (
responseId
&& typeof nextMessage.ttfbMs !== 'number'
&& pendingTtfbByResponseIdRef.current.has(responseId)
) {
nextMessage.ttfbMs = pendingTtfbByResponseIdRef.current.get(responseId);
pendingTtfbByResponseIdRef.current.delete(responseId);
}
next[idx] = nextMessage;
if (responseId) {
assistantResponseIndexByIdRef.current.set(responseId, idx);
}
return next;
}
if (!finalText) return prev;
const last = prev[prev.length - 1];
if (last?.role === 'model') {
if (last.text === finalText) return prev;
if (finalText.startsWith(last.text) || last.text.startsWith(finalText)) {
const next = [...prev];
next[next.length - 1] = { ...last, text: finalText };
return next;
}
}
const nextMessage: DebugTranscriptMessage = { role: 'model', text: finalText };
if (responseId) {
nextMessage.responseId = responseId;
if (pendingTtfbByResponseIdRef.current.has(responseId)) {
nextMessage.ttfbMs = pendingTtfbByResponseIdRef.current.get(responseId);
pendingTtfbByResponseIdRef.current.delete(responseId);
}
}
const next = [...prev, nextMessage];
if (responseId) {
assistantResponseIndexByIdRef.current.set(responseId, next.length - 1);
}
return next;
});
setIsLoading(false);
return;
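Both the delta and final handlers above recover the assistant draft row by scanning backwards, since tool rows can be appended between assistant chunks; the scan stops at a user row (a previous turn) or a model row tagged with a different responseId (a newer response). A minimal sketch of that recovery scan, with an illustrative message shape:

type Msg = { role: 'user' | 'model' | 'tool'; text: string; responseId?: string };

function findDraftModelRow(rows: Msg[], responseId?: string): number | null {
  for (let i = rows.length - 1; i >= 0; i -= 1) {
    const row = rows[i];
    if (row.role === 'model') {
      // A row tagged with a different responseId belongs to another response.
      if (responseId && row.responseId && row.responseId !== responseId) return null;
      return i;
    }
    if (row.role === 'user') return null; // crossed into the previous turn
  }
  return null;
}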
@@ -4456,8 +4509,7 @@ export const DebugDrawer: React.FC<{
ws.onclose = () => {
wsReadyRef.current = false;
setTextSessionStarted(false);
userDraftRowIdRef.current = null;
assistantDraftRowIdRef.current = null;
userDraftIndexRef.current = null;
stopPlaybackImmediately();
if (wsStatusRef.current !== 'error') setWsStatus('disconnected');
};
@@ -4553,23 +4605,6 @@ export const DebugDrawer: React.FC<{
Auto Gain Control (AGC)
</label>
</div>
<div className="rounded-md border border-white/10 bg-black/20 p-2 space-y-2">
<p className="text-[10px] uppercase tracking-widest text-muted-foreground">Prompt Voice</p>
<div className="flex items-center justify-between gap-3 rounded-md border border-white/10 bg-black/20 px-2 py-1.5">
<div className="min-w-0">
<div className="text-[11px] font-mono text-foreground truncate">FastGPT Interactive</div>
<div className="text-[10px] text-muted-foreground">
Play the interactive description or prompt voice when the popup opens.
</div>
</div>
<Switch
checked={fastgptInteractiveVoiceEnabled}
onCheckedChange={setFastgptInteractiveVoiceEnabled}
title={fastgptInteractiveVoiceEnabled ? 'Click to mute FastGPT interactive prompt voice' : 'Click to enable FastGPT interactive prompt voice'}
aria-label={`FastGPT interactive prompt voice ${fastgptInteractiveVoiceEnabled ? 'enabled' : 'disabled'}`}
/>
</div>
</div>
<div className="rounded-md border border-white/10 bg-black/20 p-2 space-y-2">
<p className="text-[10px] uppercase tracking-widest text-muted-foreground">Client Tools</p>
<p className="text-[11px] text-muted-foreground"></p>
@@ -4952,7 +4987,7 @@ export const DebugDrawer: React.FC<{
<p className="text-xs"></p>
</div>
) : (
<TranscriptList scrollRef={scrollRef} messages={messages} isLoading={isLoading} className="pb-4" />
<TranscriptionLog scrollRef={scrollRef} messages={messages} isLoading={isLoading} className="pb-4" />
)}
</div>
</div>
@@ -5051,7 +5086,7 @@ export const DebugDrawer: React.FC<{
)}
{fastgptInteractiveDialog.open && (
<div className="absolute inset-0 z-40 flex items-center justify-center bg-black/55 backdrop-blur-[1px]">
<div className="relative flex max-h-[82vh] w-[92%] max-w-lg flex-col rounded-xl border border-white/15 bg-card/95 p-4 shadow-2xl animate-in zoom-in-95 duration-200">
<div className="relative w-[92%] max-w-lg rounded-xl border border-white/15 bg-card/95 p-4 shadow-2xl animate-in zoom-in-95 duration-200">
{!fastgptInteractiveDialog.required && (
<button
type="button"
@@ -5087,7 +5122,6 @@ export const DebugDrawer: React.FC<{
</p>
)}
</div>
<div className="min-h-0 overflow-y-auto pr-1 custom-scrollbar">
{fastgptInteractiveDialog.interactionType === 'userSelect' ? (
<div className="space-y-2">
{fastgptInteractiveDialog.options.map((option) => {
@@ -5168,7 +5202,6 @@ export const DebugDrawer: React.FC<{
})}
</div>
)}
</div>
<div className="mt-4 flex items-center justify-end gap-2">
<Button
size="sm"
View File
@@ -6,13 +6,11 @@ type DebugPrefsState = {
aecEnabled: boolean;
nsEnabled: boolean;
agcEnabled: boolean;
fastgptInteractiveVoiceEnabled: boolean;
clientToolEnabledMap: Record<string, boolean>;
setWsUrl: (value: string) => void;
setAecEnabled: (value: boolean) => void;
setNsEnabled: (value: boolean) => void;
setAgcEnabled: (value: boolean) => void;
setFastgptInteractiveVoiceEnabled: (value: boolean) => void;
setClientToolEnabled: (toolId: string, enabled: boolean) => void;
hydrateClientToolDefaults: (toolIds: string[]) => void;
};
@@ -32,13 +30,11 @@ export const useDebugPrefsStore = create<DebugPrefsState>()(
aecEnabled: true,
nsEnabled: true,
agcEnabled: true,
fastgptInteractiveVoiceEnabled: true,
clientToolEnabledMap: {},
setWsUrl: (value) => set({ wsUrl: value }),
setAecEnabled: (value) => set({ aecEnabled: value }),
setNsEnabled: (value) => set({ nsEnabled: value }),
setAgcEnabled: (value) => set({ agcEnabled: value }),
setFastgptInteractiveVoiceEnabled: (value) => set({ fastgptInteractiveVoiceEnabled: value }),
setClientToolEnabled: (toolId, enabled) =>
set((state) => ({
clientToolEnabledMap: {