Update llm library web interface

This commit is contained in:
Xin Wang
2026-02-08 23:55:40 +08:00
parent 6462c4f432
commit be68e335f1
6 changed files with 614 additions and 187 deletions

View File

@@ -244,3 +244,55 @@ class TestLLMModelAPI:
response = client.post("/api/llm", json=data)
assert response.status_code == 200
assert response.json()["type"] == "embedding"
def test_preview_llm_model_success(self, client, sample_llm_model_data, monkeypatch):
    """The preview endpoint should proxy a chat request and surface the model's reply."""
    from app.routers import llm as llm_router

    # First create a model record so there is an id to preview against.
    created = client.post("/api/llm", json=sample_llm_model_data)
    model_id = created.json()["id"]

    expected_key = sample_llm_model_data["api_key"]

    class FakeResponse:
        # Mimics the subset of the httpx response object the router reads.
        status_code = 200

        def json(self):
            return {
                "choices": [{"message": {"content": "Preview OK"}}],
                "usage": {"prompt_tokens": 10, "completion_tokens": 2, "total_tokens": 12}
            }

        @property
        def text(self):
            return '{"ok":true}'

    class FakeClient:
        # Context-manager stand-in for httpx.Client that also verifies
        # the outgoing request the router builds.
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            return False

        def post(self, url, json=None, headers=None):
            assert url.endswith("/chat/completions")
            assert headers["Authorization"] == f"Bearer {expected_key}"
            assert json["messages"][0]["role"] == "user"
            return FakeResponse()

    # Swap the real HTTP client for the fake before hitting the endpoint.
    monkeypatch.setattr(llm_router.httpx, "Client", FakeClient)

    response = client.post(f"/api/llm/{model_id}/preview", json={"message": "hello"})
    assert response.status_code == 200
    payload = response.json()
    assert payload["success"] is True
    assert payload["reply"] == "Preview OK"
def test_preview_llm_model_reject_empty_message(self, client, sample_llm_model_data):
    """A whitespace-only preview message must be rejected with HTTP 400."""
    # Create a model so the preview route resolves to a real id.
    created = client.post("/api/llm", json=sample_llm_model_data)
    model_id = created.json()["id"]

    response = client.post(f"/api/llm/{model_id}/preview", json={"message": " "})
    assert response.status_code == 400