Files
AI-VideoAssistant/web/types.ts
Xin Wang da38157638 Add ASR interim results support in Assistant model and API
- Introduced `asr_interim_enabled` field in the Assistant model to control interim ASR results.
- Updated AssistantBase and AssistantUpdate schemas to include the new field.
- Modified the database schema to add the `asr_interim_enabled` column.
- Enhanced runtime metadata to reflect interim ASR settings.
- Updated API endpoints and tests to validate the new functionality.
- Adjusted documentation to include details about interim ASR results configuration.
2026-03-06 12:58:54 +08:00

241 lines
5.0 KiB
TypeScript

/**
 * Configuration for a single voice assistant as consumed by the web UI.
 * Field names mirror the backend Assistant model in camelCase
 * (e.g. backend `asr_interim_enabled` → `asrInterimEnabled`).
 */
export interface Assistant {
  id: string;
  name: string;
  /** Number of calls associated with this assistant — presumably a lifetime total; confirm with API. */
  callCount: number;
  /** Which party speaks first when a call connects. */
  firstTurnMode?: 'bot_first' | 'user_first';
  /** Opening line used by the bot at the start of a call. */
  opener: string;
  /** Tool calls attached to a manually authored opener. */
  manualOpenerToolCalls?: AssistantOpenerToolCall[];
  /** Enables a generated opener — presumably instead of the manual `opener` text; confirm with backend. */
  generatedOpenerEnabled?: boolean;
  // Pre-rendered opener audio state — assumed produced ahead of time (e.g. via TTS); TODO confirm.
  openerAudioEnabled?: boolean;
  openerAudioReady?: boolean;
  openerAudioDurationMs?: number;
  openerAudioUpdatedAt?: string; // timestamp string — exact format (ISO?) not visible here; verify
  /** System prompt for the assistant. */
  prompt: string;
  /** ID of the linked KnowledgeBase. */
  knowledgeBaseId: string;
  language: 'zh' | 'en';
  voiceOutputEnabled?: boolean;
  voice: string; // This will now store the ID of the voice from Voice Library
  speed: number; // speech rate — units/range not visible here; verify against Voice settings
  hotwords: string[]; // hotword hints — presumably forwarded to ASR; confirm
  tools?: string[]; // IDs of enabled tools
  /** Enables interim (partial) ASR results; maps to backend `asr_interim_enabled`. */
  asrInterimEnabled?: boolean;
  botCannotBeInterrupted?: boolean;
  interruptionSensitivity?: number; // In ms
  /** Which orchestration backend drives the conversation. */
  configMode?: 'platform' | 'dify' | 'fastgpt' | 'none';
  // Connection details — presumably used when configMode is an external service (dify/fastgpt); confirm.
  apiUrl?: string;
  apiKey?: string;
  // Model selections, referencing entries such as LLMModel / ASRModel by ID.
  llmModelId?: string;
  asrModelId?: string;
  embeddingModelId?: string;
  rerankModelId?: string;
}
/** A tool invocation attached to an assistant's opener. */
export interface AssistantOpenerToolCall {
  toolName: string;
  /** Arguments either as a raw string (presumably JSON) or an already-parsed object. */
  arguments?: string | Record<string, any>;
}
/** A TTS voice entry from the Voice Library (referenced by Assistant.voice). */
export interface Voice {
  id: string;
  name: string;
  vendor: string;
  gender: string;
  language: string;
  description: string;
  /** Vendor model identifier — optional; meaning depends on vendor. */
  model?: string;
  /** Vendor-side voice identifier — TODO confirm naming against the TTS provider API. */
  voiceKey?: string;
  // Per-voice connection overrides.
  apiKey?: string;
  baseUrl?: string;
  // Synthesis tuning parameters — units/ranges not visible here; verify with vendor docs.
  speed?: number;
  gain?: number;
  pitch?: number;
  enabled?: boolean;
  /** True for built-in voices — presumably not editable/deletable; confirm with UI logic. */
  isSystem?: boolean;
}
/** A knowledge base and its uploaded documents. */
export interface KnowledgeBase {
  id: string;
  name: string;
  creator: string;
  createdAt: string; // timestamp string — format not visible here
  description?: string;
  /** Embedding model used for indexing — optional, presumably a model name or ID; confirm. */
  embeddingModel?: string;
  // Chunking configuration used when ingesting documents.
  chunkSize?: number;
  chunkOverlap?: number;
  /** Processing status — allowed values not visible in this file; verify against backend. */
  status?: string;
  documents: KnowledgeDocument[];
}
/** A single document inside a KnowledgeBase. */
export interface KnowledgeDocument {
  id: string;
  name: string;
  /** Human-readable size string (not a byte count — it is typed as string). */
  size: string;
  uploadDate: string;
  /** Ingestion status — allowed values not visible in this file; verify against backend. */
  status?: string;
  /** Number of chunks produced by ingestion. */
  chunkCount?: number;
}
/** Modality of a call/interaction. */
export type InteractionType = 'text' | 'audio' | 'video';
/** One turn inside a call transcript (see CallLog.details). */
export interface InteractionDetail {
  role: 'user' | 'assistant';
  content: string; // Text content or transcript
  audioUrl?: string; // Placeholder for audio url
  imageUrls?: string[]; // For video frames
  timestamp: string;
}
/** A logged call, either from the debug console or an external channel. */
export interface CallLog {
  id: string;
  /** Where the call originated: in-app debugging vs. an external caller. */
  source: 'debug' | 'external';
  status: 'connected' | 'missed';
  startTime: string;
  /** Duration as a preformatted display string (typed as string, not a number). */
  duration: string;
  agentName: string;
  type: InteractionType;
  /** Per-turn transcript; may be absent for list views — TODO confirm. */
  details?: InteractionDetail[];
}
/** A conversation workflow: a graph of nodes connected by edges. */
export interface Workflow {
  id: string;
  name: string;
  /** Cached node count — presumably kept in sync with `nodes.length`; confirm. */
  nodeCount: number;
  createdAt: string;
  updatedAt: string;
  nodes: WorkflowNode[];
  edges: WorkflowEdge[];
  /** Prompt applied across all nodes of the workflow. */
  globalPrompt?: string;
}
// Node kinds; 'conversation' and 'human' appear alongside 'assistant' and
// 'human_transfer' — presumably legacy/alternate names; verify which are current.
export type WorkflowNodeType = 'start' | 'assistant' | 'tool' | 'human_transfer' | 'end' | 'conversation' | 'human';
/** Transition condition attached to a WorkflowEdge. */
export interface WorkflowCondition {
  /** How the condition is evaluated ('llm' presumably delegates judgment to a model; confirm). */
  type: 'always' | 'contains' | 'equals' | 'regex' | 'llm' | 'default';
  /** Whose message the condition inspects. */
  source?: 'user' | 'assistant';
  /** Single match value — used by 'contains'/'equals'/'regex'; TODO confirm mapping. */
  value?: string;
  /** Multiple match values — relationship to `value` not visible here; verify. */
  values?: string[];
  /** Prompt text — presumably for the 'llm' condition type; confirm. */
  prompt?: string;
}
/**
 * A node in a Workflow graph. Which optional sections apply depends on
 * `type` (e.g. `tool` for tool nodes) — exact mapping not visible in this
 * file; verify against the workflow editor.
 */
export interface WorkflowNode {
  id?: string;
  name: string;
  type: WorkflowNodeType;
  /** Marks the entry node — relationship to type 'start' not visible here; confirm. */
  isStart?: boolean;
  metadata: {
    /** Canvas coordinates for the editor. */
    position: { x: number; y: number };
  };
  /** Reference to an existing Assistant by ID. */
  assistantId?: string;
  /** Inline assistant configuration — shape not constrained here; confirm against backend schema. */
  assistant?: Record<string, any>;
  prompt?: string;
  messagePlan?: {
    /** Message sent when the node is entered — TODO confirm timing. */
    firstMessage?: string;
  };
  /** Variables to extract from the conversation at this node. */
  variableExtractionPlan?: {
    output: Array<{
      type: string;
      title: string;
      description: string;
    }>;
  };
  /** Tool definition for 'tool' nodes. */
  tool?: {
    type: string;
    function: {
      name: string;
      parameters: any; // presumably a JSON-Schema-like object; verify
    };
    destinations?: any[];
    messages?: any[];
  };
  /** Global-node behavior: reachable from anywhere when `enterCondition` matches — TODO confirm semantics. */
  globalNodePlan?: {
    enabled: boolean;
    enterCondition: string;
  };
}
/**
 * A directed edge between two workflow nodes.
 * NOTE(review): both `fromNodeId`/`toNodeId` and `from`/`to` exist —
 * presumably one pair is the backend naming and the other the UI naming;
 * confirm which is authoritative.
 */
export interface WorkflowEdge {
  id?: string;
  fromNodeId?: string;
  toNodeId?: string;
  from: string;
  to: string;
  /** Display label for the edge. */
  label?: string;
  condition?: WorkflowCondition;
  /** Evaluation order among sibling edges — lower/higher first not visible here; verify. */
  priority?: number;
}
/** Tabs of the assistant configuration screen — presumably; confirm against the UI. */
export enum TabValue {
  GLOBAL = 'global',
  VOICE = 'voice',
  TOOLS = 'tools',
  KNOWLEDGE = 'knowledge',
  LINK = 'link'
}
/** Auto-test strategy: a fixed scripted flow vs. an LLM-driven tester. */
export enum TestType {
  FIXED = 'fixed',
  INTELLIGENT = 'intelligent'
}
/** Channel used by the auto-test: text chat or audio call. */
export enum TestMethod {
  TEXT = 'text',
  AUDIO = 'audio'
}
/** A configured automated tester that exercises a target Assistant. */
export interface AutoTestAssistant {
  id: string;
  name: string;
  type: TestType;
  method: TestMethod;
  /** ID of the Assistant under test. */
  targetAssistantId: string;
  /** Scripted steps — presumably used when type is FIXED; confirm. */
  fixedWorkflowSteps: string[];
  /** Tester prompt — presumably used when type is INTELLIGENT; confirm. */
  intelligentPrompt: string;
  createdAt: string;
}
/** A tool callable by assistants; HTTP fields describe custom webhook tools. */
export interface Tool {
  id: string;
  name: string;
  description: string;
  category: 'system' | 'query';
  /** Icon identifier — presumably an icon name or URL; confirm with the UI. */
  icon: string;
  // HTTP invocation settings for custom tools.
  httpMethod?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
  httpUrl?: string;
  httpHeaders?: Record<string, string>;
  httpTimeoutMs?: number;
  /** Parameter definition — presumably JSON-Schema-like; verify against backend. */
  parameterSchema?: Record<string, any>;
  /** Default values merged into tool-call arguments — TODO confirm merge semantics. */
  parameterDefaults?: Record<string, any>;
  /** Whether the conversation waits for the tool's response before continuing — TODO confirm. */
  waitForResponse?: boolean;
  isCustom?: boolean;
  isSystem?: boolean;
  enabled?: boolean;
}
/** A configured language/embedding/rerank model endpoint. */
export interface LLMModel {
  id: string;
  name: string;
  vendor: string;
  /** Model role; embedding/rerank entries share this shape despite the interface name. */
  type: 'text' | 'embedding' | 'rerank';
  baseUrl: string;
  apiKey: string;
  /** Vendor model name sent to the API — distinct from the display `name`. */
  modelName?: string;
  temperature?: number;
  /** Context window size — units (tokens?) not visible here; verify. */
  contextLength?: number;
  enabled?: boolean;
}
/** A configured speech-recognition (ASR) model endpoint. */
export interface ASRModel {
  id: string;
  name: string;
  vendor: string;
  language: string;
  baseUrl: string;
  apiKey: string;
  /** Vendor model name sent to the API — distinct from the display `name`. */
  modelName?: string;
  /** Model-level hotword hints — relationship to Assistant.hotwords not visible here; verify. */
  hotwords?: string[];
  /** Insert punctuation into transcripts. */
  enablePunctuation?: boolean;
  /** Text normalization (e.g. numbers/dates) — exact behavior is vendor-defined; confirm. */
  enableNormalization?: boolean;
  enabled?: boolean;
}