Add fastgpt as separate assistant mode
This commit is contained in:
@@ -28,7 +28,7 @@ from providers.tts.volcengine import VolcengineTTSService
|
||||
_OPENAI_COMPATIBLE_PROVIDERS = {"openai_compatible", "openai-compatible", "siliconflow"}
|
||||
# Provider identifier(s) for DashScope.
_DASHSCOPE_PROVIDERS = {"dashscope"}
|
||||
# Provider identifier(s) for Volcengine (see VolcengineTTSService import above).
_VOLCENGINE_PROVIDERS = {"volcengine"}
|
||||
# LLM providers accepted by create_llm_service. "fastgpt" is dispatched to
# FastGPTLLMService; the remaining entries go through OpenAILLMService.
# NOTE: the flattened diff left two assignments here (pre- and post-commit);
# the first was dead code, immediately shadowed — only the final value is kept.
_SUPPORTED_LLM_PROVIDERS = {"openai", "fastgpt", *_OPENAI_COMPATIBLE_PROVIDERS}
|
||||
|
||||
|
||||
class DefaultRealtimeServiceFactory(RealtimeServiceFactory):
|
||||
@@ -58,7 +58,18 @@ class DefaultRealtimeServiceFactory(RealtimeServiceFactory):
|
||||
|
||||
def create_llm_service(self, spec: LLMServiceSpec) -> LLMPort:
|
||||
provider = self._normalize_provider(spec.provider)
|
||||
if provider in _SUPPORTED_LLM_PROVIDERS and spec.api_key:
|
||||
if provider == "fastgpt" and spec.api_key and spec.base_url:
|
||||
from providers.llm.fastgpt import FastGPTLLMService
|
||||
|
||||
return FastGPTLLMService(
|
||||
api_key=spec.api_key,
|
||||
base_url=spec.base_url,
|
||||
app_id=spec.app_id,
|
||||
model=spec.model,
|
||||
system_prompt=spec.system_prompt,
|
||||
)
|
||||
|
||||
if provider in _SUPPORTED_LLM_PROVIDERS and provider != "fastgpt" and spec.api_key:
|
||||
return OpenAILLMService(
|
||||
api_key=spec.api_key,
|
||||
base_url=spec.base_url,
|
||||
|
||||
Reference in New Issue
Block a user