Update llm library web interface

This commit is contained in:
Xin Wang
2026-02-08 23:55:40 +08:00
parent 6462c4f432
commit be68e335f1
6 changed files with 614 additions and 187 deletions

View File

@@ -1,4 +1,4 @@
import { ASRModel, Assistant, CallLog, InteractionDetail, KnowledgeBase, KnowledgeDocument, Voice, Workflow, WorkflowEdge, WorkflowNode } from '../types';
import { ASRModel, Assistant, CallLog, InteractionDetail, KnowledgeBase, KnowledgeDocument, LLMModel, Voice, Workflow, WorkflowEdge, WorkflowNode } from '../types';
import { apiRequest } from './apiClient';
type AnyRecord = Record<string, any>;
@@ -78,6 +78,19 @@ const mapASRModel = (raw: AnyRecord): ASRModel => ({
enabled: Boolean(readField(raw, ['enabled'], true)),
});
// Normalize a raw API record (snake_case or camelCase keys) into an LLMModel,
// applying the same client-side defaults used elsewhere in this module.
const mapLLMModel = (raw: AnyRecord): LLMModel => {
  // Small local shorthand so each field maps on one readable line.
  const pick = (keys: string[], fallback: unknown) => readField(raw, keys, fallback);
  return {
    id: String(pick(['id'], '')),
    name: pick(['name'], ''),
    vendor: pick(['vendor'], 'OpenAI Compatible'),
    type: pick(['type'], 'text'),
    baseUrl: pick(['baseUrl', 'base_url'], ''),
    apiKey: pick(['apiKey', 'api_key'], ''),
    modelName: pick(['modelName', 'model_name'], ''),
    temperature: Number(pick(['temperature'], 0.7)),
    contextLength: Number(pick(['contextLength', 'context_length'], 0)),
    enabled: Boolean(pick(['enabled'], true)),
  };
};
const mapWorkflowNode = (raw: AnyRecord): WorkflowNode => ({
name: readField(raw, ['name'], ''),
type: readField(raw, ['type'], 'conversation') as 'conversation' | 'tool' | 'human' | 'end',
@@ -351,6 +364,67 @@ export const previewASRModel = async (
return data || { success: false, error: 'Invalid preview response' };
};
// List all configured LLM models.
// The /llm endpoint may answer with either a bare array or a { list: [...] }
// wrapper; both shapes are normalized into LLMModel[].
export const fetchLLMModels = async (): Promise<LLMModel[]> => {
  const response = await apiRequest<{ list?: AnyRecord[] } | AnyRecord[]>('/llm');
  const records: AnyRecord[] = Array.isArray(response) ? response : response.list ?? [];
  return records.map((record) => mapLLMModel(record));
};
// Create a new LLM model configuration via POST /llm.
// Missing fields fall back to the client-side defaults below, and the
// camelCase input is translated to the backend's snake_case body.
export const createLLMModel = async (data: Partial<LLMModel>): Promise<LLMModel> => {
  const body = {
    id: data.id || undefined, // drop empty id so the server can assign one
    name: data.name || 'New LLM Model',
    vendor: data.vendor || 'OpenAI Compatible',
    type: data.type || 'text',
    base_url: data.baseUrl || '',
    api_key: data.apiKey || '',
    model_name: data.modelName || undefined,
    temperature: data.temperature,
    context_length: data.contextLength,
    enabled: data.enabled ?? true, // ?? preserves an explicit `false`
  };
  const created = await apiRequest<AnyRecord>('/llm', { method: 'POST', body });
  return mapLLMModel(created);
};
// Update an existing LLM model via PUT /llm/:id, mirroring createLLMModel's
// camelCase -> snake_case field mapping. Fields left undefined in `data` are
// passed through as undefined — presumably dropped during serialization by
// apiClient, leaving the stored values untouched (NOTE(review): confirm).
export const updateLLMModel = async (id: string, data: Partial<LLMModel>): Promise<LLMModel> => {
  const body = {
    name: data.name,
    vendor: data.vendor,
    type: data.type,
    base_url: data.baseUrl,
    api_key: data.apiKey,
    model_name: data.modelName,
    temperature: data.temperature,
    context_length: data.contextLength,
    enabled: data.enabled,
  };
  const updated = await apiRequest<AnyRecord>(`/llm/${id}`, { method: 'PUT', body });
  return mapLLMModel(updated);
};
// Remove the LLM model configuration identified by `id` (DELETE /llm/:id).
// The response body, if any, is ignored.
export const deleteLLMModel = async (id: string): Promise<void> => {
  const endpoint = `/llm/${id}`;
  await apiRequest(endpoint, { method: 'DELETE' });
};
// Shape of the backend's response to a one-off LLM preview call
// (see previewLLMModel). Optional fields: presumably `reply`/`usage`/
// `latency_ms` accompany success and `error` accompanies failure —
// NOTE(review): confirm against the /llm/:id/preview handler.
export type LLMPreviewResult = {
  success: boolean;
  reply?: string;
  usage?: Record<string, number>;
  latency_ms?: number;
  error?: string;
};
// Send a single test message to the model identified by `id` via
// POST /llm/:id/preview and return the backend's preview result.
// The payload is forwarded verbatim (already in the backend's snake_case).
export const previewLLMModel = async (
  id: string,
  payload: { message: string; system_prompt?: string; max_tokens?: number; temperature?: number; api_key?: string }
): Promise<LLMPreviewResult> => {
  const endpoint = `/llm/${id}/preview`;
  return apiRequest<LLMPreviewResult>(endpoint, { method: 'POST', body: payload });
};
export const fetchWorkflows = async (): Promise<Workflow[]> => {
const response = await apiRequest<{ list?: AnyRecord[] } | AnyRecord[]>('/workflows');
const list = Array.isArray(response) ? response : (response.list || []);