feat: Implement Dify LLM provider and update related configurations and tests
This commit is contained in:
@@ -302,8 +302,8 @@ def _resolve_runtime_metadata(db: Session, assistant: Assistant) -> tuple[Dict[s
|
||||
|
||||
if config_mode == "dify":
    # Dify exposes a single app-level model, so the runtime LLM service
    # points straight at the assistant's stored Dify credentials instead
    # of a provider/model pair chosen by the user.
    metadata["services"]["llm"] = {
        "provider": "dify",
        # Dify has no per-model selection; "dify" is the sentinel value
        # the frontend uses to recognize this mode.
        "model": "dify",
        "apiKey": assistant.api_key,
        "baseUrl": assistant.api_url,
    }
|
||||
|
||||
@@ -438,3 +438,19 @@ class TestAssistantAPI:
|
||||
metadata = runtime_resp.json()["sessionStartMetadata"]
|
||||
assert metadata["services"]["llm"]["provider"] == "fastgpt"
|
||||
assert metadata["services"]["llm"]["appId"] == "app-fastgpt-123"
|
||||
|
||||
def test_dify_runtime_config_uses_dify_provider(self, client, sample_assistant_data):
    """Runtime config for a Dify-mode assistant reports the Dify provider.

    Creates an assistant with ``configMode == "dify"`` and verifies that the
    session-start metadata advertises ``provider == "dify"`` together with the
    sentinel ``model == "dify"``.
    """
    sample_assistant_data.update({
        "configMode": "dify",
        "apiUrl": "https://api.dify.ai/v1",
        "apiKey": "dify-key",
    })
    assistant_resp = client.post("/api/assistants", json=sample_assistant_data)
    assert assistant_resp.status_code == 200
    assistant_id = assistant_resp.json()["id"]

    runtime_resp = client.get(f"/api/assistants/{assistant_id}/runtime-config")
    assert runtime_resp.status_code == 200
    metadata = runtime_resp.json()["sessionStartMetadata"]
    assert metadata["services"]["llm"]["provider"] == "dify"
    assert metadata["services"]["llm"]["model"] == "dify"
|
||||
|
||||
Reference in New Issue
Block a user