"""Regression tests for the interactive example helpers."""

from __future__ import annotations

import importlib.util
import sys
import types
from pathlib import Path

# Repository root: the grandparent of this test file (tests/ lives one level
# below the root).
REPO_ROOT = Path(__file__).resolve().parents[1]

# The example CLI script these regression tests load and exercise.
CHAT_CLI_PATH = REPO_ROOT / "examples" / "chat_cli.py"
|
|
|
|
|
|
def _load_chat_cli_module():
    """Load ``examples/chat_cli.py`` as a module with stubbed dependencies.

    The example script imports ``dotenv`` and ``fastgpt_client`` at module
    level.  To keep these tests hermetic, lightweight stand-ins are installed
    in ``sys.modules`` before executing the file; the original
    ``fastgpt_client`` entry (if any) is restored afterwards.  The loaded
    module is cached under ``_test_chat_cli`` so repeated calls are cheap.

    Returns:
        The executed ``chat_cli`` module object.
    """
    module_name = "_test_chat_cli"
    existing = sys.modules.get(module_name)
    if existing is not None:
        return existing

    # Stub ``dotenv`` only when the real package is absent; a no-op
    # ``load_dotenv`` is all the example needs at import time.
    dotenv_module = sys.modules.get("dotenv")
    if dotenv_module is None:
        dotenv_module = types.ModuleType("dotenv")
        dotenv_module.load_dotenv = lambda *args, **kwargs: None
        sys.modules["dotenv"] = dotenv_module

    # Always shadow ``fastgpt_client`` with inert placeholders; remember the
    # original entry (if present) so it can be restored after import.
    original_fastgpt_client = sys.modules.get("fastgpt_client")
    stub_fastgpt_client = types.ModuleType("fastgpt_client")
    stub_fastgpt_client.ChatClient = object
    stub_fastgpt_client.FastGPTInteractiveEvent = object
    stub_fastgpt_client.iter_stream_events = lambda response: iter(())
    sys.modules["fastgpt_client"] = stub_fastgpt_client

    spec = importlib.util.spec_from_file_location(module_name, CHAT_CLI_PATH)
    assert spec is not None and spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    # Register before exec so any self-referential imports resolve.
    sys.modules[module_name] = module
    try:
        spec.loader.exec_module(module)
    except BaseException:
        # Fix: on a failed exec, drop the partially-initialized module so the
        # cache check above cannot hand out a broken module on a later call.
        sys.modules.pop(module_name, None)
        raise
    finally:
        if original_fastgpt_client is None:
            sys.modules.pop("fastgpt_client", None)
        else:
            sys.modules["fastgpt_client"] = original_fastgpt_client
    return module
|
|
|
|
|
|
def test_interactive_prompt_text_uses_opener_when_prompt_is_missing():
    """The nested ``params.opener`` text wins when no explicit prompt exists."""
    chat_cli = _load_chat_cli_module()

    event = {
        "params": {
            "opener": "Please tell me about your business.",
            "inputForm": [{"label": "Business type"}],
        }
    }
    result = chat_cli._interactive_prompt_text(
        event, "Please provide the requested input"
    )

    assert result == "Please tell me about your business."
|
|
|
|
|
|
def test_interactive_prompt_text_keeps_opener_and_prompt():
    """Opener and prompt are both kept, joined by a newline."""
    chat_cli = _load_chat_cli_module()

    payload = {
        "opener": "A few details will help me tailor the answer.",
        "prompt": "Which plan are you evaluating?",
    }
    result = chat_cli._interactive_prompt_text(payload, "Please select an option")

    expected = (
        "A few details will help me tailor the answer.\n"
        "Which plan are you evaluating?"
    )
    assert result == expected
|
|
|
|
|
|
def test_extract_chat_init_opener_prefers_welcome_text():
    """``chatConfig.welcomeText`` takes precedence over the app intro."""
    chat_cli = _load_chat_cli_module()

    init_payload = {
        "data": {
            "app": {
                "chatConfig": {"welcomeText": "Welcome from chat config."},
                "intro": "Fallback intro.",
            }
        }
    }

    assert (
        chat_cli._extract_chat_init_opener(init_payload)
        == "Welcome from chat config."
    )
|
|
|
|
|
|
def test_extract_chat_init_opener_falls_back_to_intro():
    """Without a welcome text, the app's ``intro`` is used as the opener."""
    chat_cli = _load_chat_cli_module()

    init_payload = {
        "data": {
            "app": {
                "intro": "Tell me what you're working on.",
            }
        }
    }

    assert (
        chat_cli._extract_chat_init_opener(init_payload)
        == "Tell me what you're working on."
    )
|
|
|
|
|
|
def test_get_initial_app_opener_uses_chat_init():
    """The opener comes from the chat-init endpoint, called with app/chat ids."""
    chat_cli = _load_chat_cli_module()

    saved_app_id = chat_cli.APP_ID
    chat_cli.APP_ID = "app-123"

    class FakeResponse:
        def raise_for_status(self):
            return None

        def json(self):
            return {"data": {"app": {"chatConfig": {"welcomeText": "Hello from init."}}}}

    class FakeClient:
        def __init__(self):
            self.calls = []

        def get_chat_init(self, **kwargs):
            self.calls.append(kwargs)
            return FakeResponse()

    fake_client = FakeClient()
    try:
        opener = chat_cli._get_initial_app_opener(fake_client, "chat-123")
    finally:
        # Restore the module-level APP_ID even if the call raises.
        chat_cli.APP_ID = saved_app_id

    assert opener == "Hello from init."
    assert fake_client.calls == [{"appId": "app-123", "chatId": "chat-123"}]
|