Fix runtime config debug drawer

This commit is contained in:
Xin Wang
2026-02-09 08:51:44 +08:00
parent 59f326f8e5
commit 0643780c63

View File

@@ -1193,19 +1193,32 @@ export const DebugDrawer: React.FC<{
   const buildLocalResolvedRuntime = () => {
     const warnings: string[] = [];
     const services: Record<string, any> = {};
-    if (assistant.llmModelId) {
-      const llm = llmModels.find((item) => item.id === assistant.llmModelId);
-      if (llm) {
-        services.llm = {
-          provider: 'openai',
-          model: llm.modelName || llm.name,
-          apiKey: llm.apiKey,
-          baseUrl: llm.baseUrl,
-        };
-      } else {
-        warnings.push(`LLM model not found in loaded list: ${assistant.llmModelId}`);
-      }
+    const isExternalLlm = assistant.configMode === 'dify' || assistant.configMode === 'fastgpt';
+    if (isExternalLlm) {
+      services.llm = {
+        provider: 'openai',
+        model: '',
+        apiKey: assistant.apiKey || '',
+        baseUrl: assistant.apiUrl || '',
+      };
+      if (!assistant.apiUrl) warnings.push(`External LLM API URL is empty for mode: ${assistant.configMode}`);
+      if (!assistant.apiKey) warnings.push(`External LLM API key is empty for mode: ${assistant.configMode}`);
+    } else if (assistant.llmModelId) {
+      const llm = llmModels.find((item) => item.id === assistant.llmModelId);
+      if (llm) {
+        services.llm = {
+          provider: 'openai',
+          model: llm.modelName || llm.name,
+          apiKey: llm.apiKey,
+          baseUrl: llm.baseUrl,
+        };
+      } else {
+        warnings.push(`LLM model not found in loaded list: ${assistant.llmModelId}`);
+      }
+    } else {
+      // Keep empty object to indicate engine should use default provider model.
+      services.llm = {};
     }
     if (assistant.asrModelId) {
@@ -1245,9 +1258,10 @@ export const DebugDrawer: React.FC<{
   const localResolved = {
     assistantId: assistant.id,
     sources: {
-      llmModelId: assistant.llmModelId || '',
+      llmModelId: isExternalLlm ? '' : (assistant.llmModelId || ''),
       asrModelId: assistant.asrModelId || '',
       voiceId: assistant.voice || '',
+      llmConfigMode: assistant.configMode || 'platform',
     },
     warnings,
     sessionStartMetadata: {