feat: enhance chat CLI and TUI with initial opener handling and improved prompt logic

This commit is contained in:
Xin Wang
2026-03-10 23:55:53 +08:00
parent ef2614a70a
commit a55ca37c39
5 changed files with 278 additions and 26 deletions

View File

@@ -162,13 +162,18 @@ class TestAsyncFastGPTClientSendRequest:
mock_response = Mock(spec=httpx.Response)
mock_response.status_code = 200
# Track if close was called on the response
original_close_called = []
original_sync_close_called = []
original_async_close_called = []
async def original_close():
original_close_called.append(True)
def original_close():
original_sync_close_called.append(True)
raise AssertionError("sync close should not be used for async streaming responses")
mock_response.close = original_close
async def original_aclose():
original_async_close_called.append(True)
mock_response.close = Mock(side_effect=original_close)
mock_response.aclose = AsyncMock(side_effect=original_aclose)
mock_stream_context = AsyncContextManagerMock(mock_response)
@@ -182,8 +187,9 @@ class TestAsyncFastGPTClientSendRequest:
# Verify stream context exit was called
mock_stream_context.__aexit__.assert_called_once_with(None, None, None)
# Verify the original close was called
assert len(original_close_called) == 1
# Verify async cleanup path was used instead of sync close()
assert len(original_sync_close_called) == 0
assert len(original_async_close_called) == 1
await client.close()
@pytest.mark.asyncio

139
tests/test_chat_examples.py Normal file
View File

@@ -0,0 +1,139 @@
"""Regression tests for the interactive example helpers."""
from __future__ import annotations
import importlib.util
import sys
import types
from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parents[1]
CHAT_CLI_PATH = REPO_ROOT / "examples" / "chat_cli.py"
def _load_chat_cli_module():
    """Load ``examples/chat_cli.py`` as a module with its dependencies stubbed.

    The example script imports ``dotenv`` and ``fastgpt_client`` at module
    level; neither is needed for the pure helper functions under test, so
    both are replaced with in-memory stub modules before execution.  The
    loaded module is cached in ``sys.modules`` under a private name so
    repeated calls are cheap.

    Returns:
        types.ModuleType: the executed ``chat_cli`` module.
    """
    module_name = "_test_chat_cli"
    existing = sys.modules.get(module_name)
    if existing is not None:
        return existing

    # Stub out python-dotenv only if it is not already importable.
    dotenv_module = sys.modules.get("dotenv")
    if dotenv_module is None:
        dotenv_module = types.ModuleType("dotenv")
        dotenv_module.load_dotenv = lambda *args, **kwargs: None
        sys.modules["dotenv"] = dotenv_module

    # Temporarily shadow fastgpt_client with a minimal stub; the original
    # (if any) is restored after the example module has been executed.
    original_fastgpt_client = sys.modules.get("fastgpt_client")
    stub_fastgpt_client = types.ModuleType("fastgpt_client")
    stub_fastgpt_client.ChatClient = object
    stub_fastgpt_client.FastGPTInteractiveEvent = object
    stub_fastgpt_client.iter_stream_events = lambda response: iter(())
    sys.modules["fastgpt_client"] = stub_fastgpt_client

    spec = importlib.util.spec_from_file_location(module_name, CHAT_CLI_PATH)
    assert spec is not None and spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module
    try:
        spec.loader.exec_module(module)
    except BaseException:
        # Bug fix: do not leave a half-initialised module cached in
        # sys.modules — a later call would silently return the broken
        # module instead of retrying the load.
        sys.modules.pop(module_name, None)
        raise
    finally:
        if original_fastgpt_client is None:
            sys.modules.pop("fastgpt_client", None)
        else:
            sys.modules["fastgpt_client"] = original_fastgpt_client
    return module
def test_interactive_prompt_text_uses_opener_when_prompt_is_missing():
    """When the node carries only an opener, it becomes the whole prompt."""
    chat_cli = _load_chat_cli_module()
    node = {
        "params": {
            "opener": "Please tell me about your business.",
            "inputForm": [{"label": "Business type"}],
        }
    }
    result = chat_cli._interactive_prompt_text(node, "Please provide the requested input")
    assert result == "Please tell me about your business."
def test_interactive_prompt_text_keeps_opener_and_prompt():
    """Opener and explicit prompt are both kept, joined by a newline."""
    chat_cli = _load_chat_cli_module()
    node = {
        "opener": "A few details will help me tailor the answer.",
        "prompt": "Which plan are you evaluating?",
    }
    result = chat_cli._interactive_prompt_text(node, "Please select an option")
    expected = "A few details will help me tailor the answer.\nWhich plan are you evaluating?"
    assert result == expected
def test_extract_chat_init_opener_prefers_welcome_text():
    """chatConfig.welcomeText wins over the app-level intro fallback."""
    chat_cli = _load_chat_cli_module()
    payload = {
        "data": {
            "app": {
                "chatConfig": {"welcomeText": "Welcome from chat config."},
                "intro": "Fallback intro.",
            }
        }
    }
    assert chat_cli._extract_chat_init_opener(payload) == "Welcome from chat config."
def test_extract_chat_init_opener_falls_back_to_intro():
    """Without a welcomeText, the app intro is used as the opener."""
    chat_cli = _load_chat_cli_module()
    payload = {
        "data": {
            "app": {
                "intro": "Tell me what you're working on.",
            }
        }
    }
    assert chat_cli._extract_chat_init_opener(payload) == "Tell me what you're working on."
def test_get_initial_app_opener_uses_chat_init():
    """_get_initial_app_opener calls chat-init once and returns its welcome text."""
    chat_cli = _load_chat_cli_module()
    saved_app_id = chat_cli.APP_ID
    chat_cli.APP_ID = "app-123"

    class _FakeResponse:
        def raise_for_status(self):
            return None

        def json(self):
            return {"data": {"app": {"chatConfig": {"welcomeText": "Hello from init."}}}}

    class _FakeClient:
        def __init__(self):
            self.calls = []

        def get_chat_init(self, **kwargs):
            self.calls.append(kwargs)
            return _FakeResponse()

    fake_client = _FakeClient()
    try:
        opener = chat_cli._get_initial_app_opener(fake_client, "chat-123")
    finally:
        # Always restore the module-level APP_ID, even if the call raises.
        chat_cli.APP_ID = saved_app_id

    assert opener == "Hello from init."
    assert fake_client.calls == [{"appId": "app-123", "chatId": "chat-123"}]