feat: add chat_tui example for FastGPT with Textual interface

- Introduced a new example script `chat_tui.py` that provides a full-screen Textual interface for interacting with FastGPT.
- Implemented streaming chat updates, workflow logging, and modal handling for interactive nodes.
- Enhanced FastGPT client with new streaming capabilities and structured event types for better interaction handling.
- Normalized base URL handling in the client to prevent duplicate `/api` paths.
- Added tests for streaming event parsing and interaction handling.
This commit is contained in:
Xin Wang
2026-03-10 15:34:27 +08:00
parent eab8e15cd6
commit ef2614a70a
14 changed files with 1562 additions and 28 deletions

View File

@@ -1,4 +1,3 @@
API_KEY=""
BASE_URL=""
CHAT_ID=""
APP_ID=""
APP_ID=""

View File

@@ -153,29 +153,29 @@ def delete_chat_item():
if __name__ == "__main__":
# print("=== Simple Chat ===")
# try:
# simple_chat()
# except Exception as e:
# print(f"Error: {e}")
print("=== Simple Chat ===")
try:
simple_chat()
except Exception as e:
print(f"Error: {e}")
# print("\n=== Streaming Chat ===")
# try:
# streaming_chat()
# except Exception as e:
# print(f"Error: {e}")
print("\n=== Streaming Chat ===")
try:
streaming_chat()
except Exception as e:
print(f"Error: {e}")
# print("\n=== Chat with Context ===")
# try:
# chat_with_context()
# except Exception as e:
# print(f"Error: {e}")
print("\n=== Chat with Context ===")
try:
chat_with_context()
except Exception as e:
print(f"Error: {e}")
# print("\n=== Get Histories ===")
# try:
# get_histories()
# except Exception as e:
# print(f"Error: {e}")
print("\n=== Get Histories ===")
try:
get_histories()
except Exception as e:
print(f"Error: {e}")
print("\n=== Delete Chat Item ===")
try:

431
examples/chat_cli.py Normal file
View File

@@ -0,0 +1,431 @@
"""Interactive chat CLI with FastGPT streaming events.
Run from the examples directory with .env configured:
python chat_cli.py
python chat_cli.py --chat-id my_existing_conversation
This example supports:
- streaming text output
- workflow/tool event logs
- interactive FastGPT nodes (`userSelect` and `userInput`)
Type your message and press Enter. Type /quit to exit.
During an interactive prompt, type /cancel to stop that prompt locally.
"""
from __future__ import annotations
import argparse
import json
import os
import sys
import uuid
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, List, Optional
from dotenv import load_dotenv
from fastgpt_client import ChatClient, FastGPTInteractiveEvent, iter_stream_events
# Load credentials from the .env file sitting next to this script.
load_dotenv(Path(__file__).with_name(".env"))
API_KEY = os.getenv("API_KEY")
BASE_URL = os.getenv("BASE_URL")
# Reconfigure stdout/stderr to UTF-8 with replacement so streamed answers do
# not crash on consoles whose default encoding cannot represent them.
for stream in (sys.stdout, sys.stderr):
    if hasattr(stream, "reconfigure"):
        try:
            stream.reconfigure(encoding="utf-8", errors="replace")
        except Exception:
            # Best-effort only; keep the original stream configuration.
            pass
@dataclass
class StreamTurnResult:
    """Outcome of one streaming turn against FastGPT."""

    # Full assistant text accumulated during the turn (may be empty).
    text: str
    # Set when FastGPT paused the workflow waiting for user input; None when
    # the turn completed normally.
    interactive: Optional[FastGPTInteractiveEvent] = None
def _write_text(text: str) -> None:
try:
sys.stdout.write(text)
except UnicodeEncodeError:
encoding = getattr(sys.stdout, "encoding", None) or "utf-8"
safe_text = text.encode(encoding, errors="replace").decode(encoding, errors="replace")
sys.stdout.write(safe_text)
sys.stdout.flush()
def _extract_text_from_event(kind: str, payload: Any) -> str:
if not isinstance(payload, dict):
return ""
if kind in {"answer", "fastAnswer"}:
text = payload.get("text")
if isinstance(text, str) and text:
return text
choices = payload.get("choices") if isinstance(payload.get("choices"), list) else []
if not choices:
return str(payload.get("text") or "")
first_choice = choices[0] if isinstance(choices[0], dict) else {}
delta = first_choice.get("delta") if isinstance(first_choice.get("delta"), dict) else {}
content = delta.get("content")
if isinstance(content, str) and content:
return content
message = first_choice.get("message") if isinstance(first_choice.get("message"), dict) else {}
message_content = message.get("content")
if isinstance(message_content, str) and message_content:
return message_content
return ""
def _nested_tool_payload(payload: Any) -> Dict[str, Any]:
if not isinstance(payload, dict):
return {}
nested = payload.get("tool")
if isinstance(nested, dict):
return nested
return {}
def _tool_name_from_event(payload: Any) -> str:
if not isinstance(payload, dict):
return "?"
nested = _nested_tool_payload(payload)
return str(payload.get("toolName") or nested.get("toolName") or payload.get("functionName") or nested.get("functionName") or "?")
def _normalize_option(raw_option: Any, index: int) -> Optional[Dict[str, str]]:
if isinstance(raw_option, str):
value = raw_option.strip()
if not value:
return None
return {"id": f"option_{index}", "label": value, "value": value, "description": ""}
if not isinstance(raw_option, dict):
return None
label = str(raw_option.get("label") or raw_option.get("value") or raw_option.get("id") or "").strip()
value = str(raw_option.get("value") or raw_option.get("label") or raw_option.get("id") or "").strip()
option_id = str(raw_option.get("id") or value or f"option_{index}").strip()
if not label and not value:
return None
return {
"id": option_id or f"option_{index}",
"label": label or value,
"value": value or label,
"description": str(raw_option.get("description") or "").strip(),
}
def _resolve_option_token(token: str, options: List[Dict[str, str]]) -> Optional[str]:
normalized = token.strip()
if not normalized:
return None
if normalized.isdigit():
index = int(normalized) - 1
if 0 <= index < len(options):
return options[index]["value"]
lowered = normalized.lower()
for option in options:
if lowered in {
option["id"].lower(),
option["label"].lower(),
option["value"].lower(),
}:
return option["value"]
return None
def _interactive_prompt_text(payload: Dict[str, Any], default_text: str) -> str:
params = payload.get("params") if isinstance(payload.get("params"), dict) else {}
return str(
payload.get("prompt")
or payload.get("title")
or payload.get("text")
or payload.get("description")
or params.get("description")
or default_text
).strip()
def _prompt_user_select(event: FastGPTInteractiveEvent) -> Optional[str]:
    """Console prompt for a FastGPT ``userSelect`` node.

    Returns the chosen option value (comma-joined when multi-select is
    enabled), or None when the user cancels or no options were provided.
    Loops until a valid selection is made.
    """
    payload = event.data
    # Defensive: params may be missing or a non-dict.
    params = payload.get("params") if isinstance(payload.get("params"), dict) else {}
    prompt_text = _interactive_prompt_text(payload, "Please select an option")
    multiple = bool(params.get("multiple") or payload.get("multiple"))
    raw_options = params.get("userSelectOptions") if isinstance(params.get("userSelectOptions"), list) else []
    # Normalize options, dropping blank/unusable entries.
    options = [item for index, raw in enumerate(raw_options, start=1) if (item := _normalize_option(raw, index))]
    print()
    print(f"[INTERACTIVE] {prompt_text}")
    for index, option in enumerate(options, start=1):
        print(f" {index}. {option['label']}")
        if option["description"]:
            print(f" {option['description']}")
    if not options:
        print("No selectable options were provided by FastGPT.")
        return None
    while True:
        hint = "comma-separated indexes/values" if multiple else "an index or value"
        raw_input_value = input(f"Select {hint} (/cancel to stop): ").strip()
        if not raw_input_value:
            print("Selection is required.")
            continue
        if raw_input_value.lower() == "/cancel":
            # Local cancel: caller treats None as "stop this prompt".
            return None
        # Multi-select accepts a comma-separated list of tokens.
        tokens = [part.strip() for part in raw_input_value.split(",")] if multiple else [raw_input_value]
        selected_values: List[str] = []
        invalid_tokens: List[str] = []
        for token in tokens:
            resolved = _resolve_option_token(token, options)
            if resolved is None:
                invalid_tokens.append(token)
                continue
            selected_values.append(resolved)
        if invalid_tokens:
            # Reject the whole entry if any token failed to resolve.
            print(f"Invalid option(s): {', '.join(invalid_tokens)}")
            continue
        if not selected_values:
            print("Selection is required.")
            continue
        if not multiple:
            return selected_values[0]
        return ", ".join(selected_values)
def _prompt_user_input(event: FastGPTInteractiveEvent) -> Optional[str]:
    """Console prompt for a FastGPT ``userInput`` node.

    With structured form fields, returns a JSON object string mapping field
    keys to entered values; without fields, returns the raw input string.
    Returns None when the user cancels.
    """
    payload = event.data
    # Defensive: params may be missing or a non-dict.
    params = payload.get("params") if isinstance(payload.get("params"), dict) else {}
    prompt_text = _interactive_prompt_text(payload, "Please provide the requested input")
    form_fields = params.get("inputForm") if isinstance(params.get("inputForm"), list) else []
    print()
    print(f"[INTERACTIVE] {prompt_text}")
    if not form_fields:
        # Unstructured prompt: a single free-form value.
        value = input("Input (/cancel to stop): ").strip()
        if value.lower() == "/cancel":
            return None
        return value
    values: Dict[str, Any] = {}
    for index, field in enumerate(form_fields, start=1):
        if not isinstance(field, dict):
            continue
        name = str(field.get("key") or field.get("name") or f"field_{index}").strip() or f"field_{index}"
        label = str(field.get("label") or field.get("name") or name).strip() or name
        placeholder = str(field.get("placeholder") or "").strip()
        default_value = field.get("defaultValue", field.get("default"))
        required = bool(field.get("required"))
        # Build a prompt label that surfaces placeholder, default, and optionality.
        prompt_label = label
        if placeholder:
            prompt_label = f"{prompt_label} ({placeholder})"
        if default_value not in (None, ""):
            prompt_label = f"{prompt_label} [{default_value}]"
        if not required:
            prompt_label = f"{prompt_label} [optional]"
        prompt_label = f"{prompt_label}: "
        while True:
            raw_input_value = input(prompt_label).strip()
            if raw_input_value.lower() == "/cancel":
                return None
            if not raw_input_value and default_value not in (None, ""):
                # Empty entry picks up the declared default.
                raw_input_value = str(default_value)
            if raw_input_value or not required:
                values[name] = raw_input_value
                break
            print("This field is required.")
    return json.dumps(values, ensure_ascii=False)
def prompt_interactive(event: FastGPTInteractiveEvent) -> Optional[str]:
    """Dispatch an interactive event to the matching console prompt helper."""
    handler = _prompt_user_input if event.interaction_type == "userInput" else _prompt_user_select
    return handler(event)
def stream_reply(client: ChatClient, messages: List[Dict[str, Any]], chat_id: str) -> StreamTurnResult:
    """Stream a single FastGPT request and stop when interactive input is required.

    Prints answer text as it arrives and logs workflow/tool events inline.
    Returns a StreamTurnResult with the accumulated text and, when the
    workflow paused on an interactive node, that event. Raises RuntimeError
    when FastGPT reports a stream-level error.
    """
    response = client.create_chat_completion(
        messages=messages,
        stream=True,
        detail=True,
        chatId=chat_id,
    )
    response.raise_for_status()
    full_content: List[str] = []
    interactive_event: Optional[FastGPTInteractiveEvent] = None
    # True while answer text is mid-line; log output breaks the line first.
    printed_text = False
    try:
        for event in iter_stream_events(response):
            # Incremental answer text (FastGPT-native or OpenAI-style).
            if event.kind in {"data", "answer", "fastAnswer"}:
                content = _extract_text_from_event(event.kind, event.data)
                if content:
                    printed_text = True
                    full_content.append(content)
                    _write_text(content)
                continue
            # Workflow node status updates.
            if event.kind == "flowNodeStatus":
                if printed_text:
                    print()
                    printed_text = False
                if isinstance(event.data, dict):
                    status = str(event.data.get("status") or "?")
                    node_name = str(event.data.get("nodeName") or event.data.get("name") or event.data.get("node_id") or "Unknown node")
                    print(f"[FLOW] {status}: {node_name}")
                else:
                    print(f"[FLOW] {event.data}")
                continue
            # Per-module response summaries (detail=True).
            if event.kind == "flowResponses":
                if printed_text:
                    print()
                    printed_text = False
                if isinstance(event.data, dict):
                    module_name = str(event.data.get("moduleName") or event.data.get("nodeName") or "Unknown module")
                    print(f"[FLOW] response from: {module_name}")
                elif isinstance(event.data, list):
                    print(f"[FLOW] response details: {len(event.data)} module record(s)")
                else:
                    print(f"[FLOW] response details: {event.data}")
                continue
            # Tool lifecycle events.
            if event.kind == "toolCall":
                if printed_text:
                    print()
                    printed_text = False
                tool_name = _tool_name_from_event(event.data)
                print(f"[TOOL] Calling: {tool_name}")
                continue
            if event.kind == "toolParams":
                if printed_text:
                    print()
                    printed_text = False
                print(f"[TOOL] Params: {event.data}")
                continue
            if event.kind == "toolResponse":
                if printed_text:
                    print()
                    printed_text = False
                print(f"[TOOL] Response: {event.data}")
                continue
            # Workflow variable updates.
            if event.kind == "updateVariables":
                if printed_text:
                    print()
                    printed_text = False
                if isinstance(event.data, dict):
                    print(f"[VARS] Updated: {event.data.get('variables') or event.data}")
                else:
                    print(f"[VARS] Updated: {event.data}")
                continue
            # Workflow paused for user input: stop streaming and surface it.
            if event.kind == "interactive":
                if printed_text:
                    print()
                interactive_event = event
                break
            # Stream-level error from FastGPT: abort the turn.
            if event.kind == "error":
                if printed_text:
                    print()
                message = str(event.data.get("message") or event.data.get("error") or "Unknown FastGPT error")
                raise RuntimeError(message)
            if event.kind == "done":
                break
    finally:
        # Always release the HTTP connection, even on errors or early break.
        response.close()
    return StreamTurnResult(text="".join(full_content), interactive=interactive_event)
def _parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="FastGPT interactive chat CLI")
parser.add_argument(
"--chat-id",
dest="chat_id",
help="Reuse an existing FastGPT chatId. Defaults to a random chat_tui_* value.",
)
return parser.parse_args()
def main() -> None:
    """Run the chat REPL: read input, stream replies, and handle interactive nodes.

    Requires API_KEY and BASE_URL from .env; exits with status 1 otherwise.
    """
    args = _parse_args()
    if not API_KEY or not BASE_URL:
        print("Set API_KEY and BASE_URL in .env (see .env.example)")
        sys.exit(1)
    # Reuse the caller-supplied chatId, or mint a fresh random one.
    chat_id = args.chat_id or f"chat_tui_{uuid.uuid4().hex[:12]}"
    print("FastGPT Chat (interactive streaming). Type /quit to exit.\n")
    print(f"Using chatId: {chat_id}\n")
    with ChatClient(api_key=API_KEY, base_url=BASE_URL) as client:
        while True:
            try:
                user_input = input("You: ").strip()
            except (EOFError, KeyboardInterrupt):
                print("\nBye.")
                break
            if not user_input:
                continue
            if user_input.lower() in {"/quit", "/exit", "/q"}:
                print("Bye.")
                break
            pending_messages = [{"role": "user", "content": user_input}]
            assistant_parts: List[str] = []
            print("Assistant: ", end="", flush=True)
            # Inner loop: keep streaming until the turn finishes without an
            # interactive pause, feeding interactive answers back as follow-ups.
            while True:
                try:
                    turn_result = stream_reply(client, pending_messages, chat_id)
                except Exception as exc:
                    print(f"\nError: {exc}")
                    break
                if turn_result.text:
                    assistant_parts.append(turn_result.text)
                if turn_result.interactive is None:
                    print()
                    break
                follow_up = prompt_interactive(turn_result.interactive)
                if follow_up is None:
                    print("[INTERACTIVE] Prompt cancelled locally.")
                    break
                pending_messages = [{"role": "user", "content": follow_up}]
                print("Assistant (resume): ", end="", flush=True)
            if assistant_parts:
                # Visual separator after each completed assistant turn.
                print("-" * 40)


if __name__ == "__main__":
    main()

683
examples/chat_tui.py Normal file
View File

@@ -0,0 +1,683 @@
"""Textual FastGPT workbench styled like a coding assistant terminal.
Run from the examples directory with .env configured:
python chat_tui.py
python chat_tui.py --chat-id my_existing_conversation
This example provides:
- a full-screen Textual interface
- streaming chat updates
- workflow / tool event logging
- modal handling for FastGPT interactive nodes
"""
from __future__ import annotations
import argparse
import json
import sys
import uuid
from pathlib import Path
from typing import Any, Dict, List, Optional
from textual import on, work
from textual.app import App, ComposeResult
from textual.binding import Binding
from textual.containers import Container, Horizontal, Vertical, VerticalScroll
from textual.screen import ModalScreen
from textual.widgets import Button, Checkbox, Footer, Input, RichLog, Static, TextArea
# Make sibling example modules (chat_cli, fastgpt_client) importable even when
# this script is launched from outside the examples directory.
EXAMPLES_DIR = Path(__file__).resolve().parent
if str(EXAMPLES_DIR) not in sys.path:
    sys.path.insert(0, str(EXAMPLES_DIR))
from chat_cli import (
API_KEY,
BASE_URL,
_extract_text_from_event,
_interactive_prompt_text,
_normalize_option,
_tool_name_from_event,
)
from fastgpt_client import ChatClient, FastGPTInteractiveEvent, iter_stream_events
class MessageCard(Static):
    """Lightweight message block used in the transcript pane."""

    def __init__(self, role: str, title: str, content: str, widget_id: str) -> None:
        # Display a single space when content is empty so the card still renders.
        super().__init__(content or " ", id=widget_id, classes=f"message {role}")
        self.role = role
        self.content_text = content
        self.border_title = title

    def set_text(self, content: str) -> None:
        """Replace the card body with *content*."""
        self.content_text = content
        self.update(content or " ")

    def append_text(self, chunk: str) -> None:
        """Append a streamed chunk, discarding any placeholder text first."""
        is_placeholder = self.content_text in {"", "Thinking…"}
        self.content_text = chunk if is_placeholder else self.content_text + chunk
        self.update(self.content_text or " ")
class InteractiveInputScreen(ModalScreen[Optional[str]]):
    """Modal used for FastGPT userInput interactions.

    Dismisses with a JSON object string (field key -> value) when structured
    fields exist, a bare string for free-form input, or None on cancel.
    """

    CSS = """
    InteractiveInputScreen {
        align: center middle;
        background: rgba(4, 7, 11, 0.82);
    }
    .dialog {
        width: 84;
        max-width: 110;
        background: #101620;
        border: round #47c6b3;
        padding: 1 2;
    }
    .dialog-title {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }
    .field-label {
        color: #c6d2df;
        margin-top: 1;
    }
    .dialog-actions {
        margin-top: 1;
        height: auto;
    }
    .dialog-actions Button {
        width: 1fr;
        margin-right: 1;
    }
    .validation {
        color: #ffb4a2;
        margin-top: 1;
    }
    """

    def __init__(self, event: FastGPTInteractiveEvent) -> None:
        """Normalize the event payload into prompt text and form field specs."""
        super().__init__()
        self.event = event
        self.prompt_text = _interactive_prompt_text(event.data, "Please provide the requested input")
        # Defensive parsing, mirroring chat_cli._prompt_user_input: FastGPT may
        # send `params` as a non-dict or `inputForm` as a non-list, which would
        # previously raise AttributeError/TypeError in the chained .get() calls.
        params = event.data.get("params") if isinstance(event.data.get("params"), dict) else {}
        raw_fields = params.get("inputForm") if isinstance(params.get("inputForm"), list) else []
        self.fields: List[Dict[str, Any]] = []
        for index, raw_field in enumerate(raw_fields, start=1):
            if not isinstance(raw_field, dict):
                continue
            key = str(raw_field.get("key") or raw_field.get("name") or f"field_{index}").strip() or f"field_{index}"
            label = str(raw_field.get("label") or raw_field.get("name") or key).strip() or key
            placeholder = str(raw_field.get("placeholder") or raw_field.get("description") or "").strip()
            default_value = raw_field.get("defaultValue", raw_field.get("default"))
            required = bool(raw_field.get("required"))
            self.fields.append(
                {
                    "key": key,
                    "label": label,
                    "placeholder": placeholder,
                    "default": "" if default_value in (None, "") else str(default_value),
                    "required": required,
                }
            )

    def compose(self) -> ComposeResult:
        """Build the dialog: title, one Input per field (or a free-form Input)."""
        with Container(classes="dialog"):
            yield Static(self.prompt_text, classes="dialog-title")
            if not self.fields:
                yield Static("FastGPT did not provide structured fields. Enter a single value below.", classes="field-label")
                yield Input(placeholder="Workflow input", id="input-freeform")
            else:
                for index, field in enumerate(self.fields, start=1):
                    suffix = "" if field["required"] else " [optional]"
                    yield Static(f"{index}. {field['label']}{suffix}", classes="field-label")
                    yield Input(
                        value=field["default"],
                        placeholder=field["placeholder"],
                        id=f"input-{index}",
                    )
            yield Static("", id="validation", classes="validation")
            with Horizontal(classes="dialog-actions"):
                yield Button("Cancel", id="cancel")
                yield Button("Submit", id="submit", variant="primary")

    def on_mount(self) -> None:
        """Focus the first input, or Submit when there is nothing to type."""
        inputs = list(self.query(Input))
        if inputs:
            inputs[0].focus()
        else:
            self.query_one("#submit", Button).focus()

    @on(Button.Pressed)
    def handle_button(self, event: Button.Pressed) -> None:
        """Validate required fields and dismiss with the collected values."""
        if event.button.id == "cancel":
            self.dismiss(None)
            return
        if not self.fields:
            value = self.query_one("#input-freeform", Input).value.strip()
            # Empty free-form input is treated as a local cancel.
            self.dismiss(None if not value else value)
            return
        payload: Dict[str, Any] = {}
        for index, field in enumerate(self.fields, start=1):
            value = self.query_one(f"#input-{index}", Input).value.strip()
            if not value and field["required"]:
                self.query_one("#validation", Static).update(f"{field['label']} is required.")
                return
            payload[field["key"]] = value
        self.dismiss(json.dumps(payload, ensure_ascii=False))
class InteractiveSelectScreen(ModalScreen[Optional[str]]):
    """Modal used for FastGPT userSelect interactions.

    Single-select renders one button per option; multi-select renders
    checkboxes plus a Submit button. Dismisses with the selected value
    (comma-joined for multi-select) or None on cancel.
    """

    CSS = """
    InteractiveSelectScreen {
        align: center middle;
        background: rgba(4, 7, 11, 0.82);
    }
    .dialog {
        width: 84;
        max-width: 110;
        background: #101620;
        border: round #f5d76e;
        padding: 1 2;
    }
    .dialog-title {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }
    .option-description {
        color: #8fa1b3;
        margin-left: 2;
        margin-bottom: 1;
    }
    .dialog-actions {
        margin-top: 1;
        height: auto;
    }
    .dialog-actions Button {
        width: 1fr;
        margin-right: 1;
    }
    .choice-button {
        width: 1fr;
        margin-top: 1;
    }
    .validation {
        color: #ffb4a2;
        margin-top: 1;
    }
    """

    def __init__(self, event: FastGPTInteractiveEvent) -> None:
        """Normalize the event payload into prompt text, mode, and options."""
        super().__init__()
        self.event = event
        payload = event.data
        # Defensive: params may be missing or a non-dict.
        params = payload.get("params") if isinstance(payload.get("params"), dict) else {}
        self.prompt_text = _interactive_prompt_text(payload, "Please select an option")
        self.multiple = bool(params.get("multiple") or payload.get("multiple"))
        raw_options = params.get("userSelectOptions") if isinstance(params.get("userSelectOptions"), list) else []
        # Normalize options, dropping blank/unusable entries.
        self.options = [
            item
            for index, raw_option in enumerate(raw_options, start=1)
            if (item := _normalize_option(raw_option, index))
        ]

    def compose(self) -> ComposeResult:
        """Build the dialog: title, option widgets, validation line, actions."""
        with Container(classes="dialog"):
            yield Static(self.prompt_text, classes="dialog-title")
            if not self.options:
                yield Static("FastGPT did not provide selectable options.", classes="option-description")
            elif self.multiple:
                for index, option in enumerate(self.options, start=1):
                    label = f"{index}. {option['label']}"
                    if option["description"]:
                        label = f"{label} - {option['description']}"
                    yield Checkbox(label, id=f"check-{index}")
            else:
                for index, option in enumerate(self.options, start=1):
                    yield Button(f"{index}. {option['label']}", id=f"choice-{index}", classes="choice-button")
                    if option["description"]:
                        yield Static(option["description"], classes="option-description")
            yield Static("", id="validation", classes="validation")
            with Horizontal(classes="dialog-actions"):
                yield Button("Cancel", id="cancel")
                if self.multiple:
                    yield Button("Submit", id="submit", variant="primary")

    def on_mount(self) -> None:
        """Focus the first actionable button, falling back to Cancel."""
        buttons = [button for button in self.query(Button) if button.id and button.id != "cancel"]
        if buttons:
            buttons[0].focus()
        else:
            self.query_one("#cancel", Button).focus()

    @on(Button.Pressed)
    def handle_button(self, event: Button.Pressed) -> None:
        """Resolve the press: cancel, direct choice, or multi-select submit."""
        button_id = event.button.id or ""
        if button_id == "cancel":
            self.dismiss(None)
            return
        if button_id.startswith("choice-"):
            # Single-select: the button id index maps straight to an option.
            index = int(button_id.split("-", 1)[1]) - 1
            self.dismiss(self.options[index]["value"])
            return
        if button_id == "submit":
            selected: List[str] = []
            for index, option in enumerate(self.options, start=1):
                if self.query_one(f"#check-{index}", Checkbox).value:
                    selected.append(option["value"])
            if not selected:
                self.query_one("#validation", Static).update("Select at least one option.")
                return
            self.dismiss(", ".join(selected))
class FastGPTWorkbench(App[None]):
    """Claude Code style Textual workbench for FastGPT streaming chat.

    Layout: a sidebar (session info, status, shortcuts, event log) next to a
    transcript pane with a composer. Streaming runs in a worker thread; all
    widget updates are marshalled back via call_from_thread.
    """

    CSS = """
    Screen {
        background: #0b0f14;
        color: #e6edf3;
    }
    #shell {
        layout: grid;
        grid-size: 2;
        grid-columns: 32 1fr;
        height: 1fr;
    }
    #sidebar {
        padding: 1 1 1 2;
        background: #101620;
        border-right: heavy #273244;
    }
    #brand {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }
    .panel {
        background: #0d131d;
        border: round #2b3547;
        padding: 1;
        margin-bottom: 1;
    }
    #event_log {
        height: 1fr;
        background: #0d131d;
        border: round #2b3547;
    }
    #main_panel {
        layout: vertical;
    }
    #chat_title {
        padding: 1 2;
        background: #101620;
        border-bottom: heavy #273244;
        color: #f0f3f7;
        text-style: bold;
    }
    #messages {
        padding: 1 2;
        height: 1fr;
    }
    .message {
        width: 1fr;
        padding: 1 2;
        margin-bottom: 1;
        background: #111723;
        border: round #2b3547;
    }
    .user {
        border-left: tall #f5d76e;
        background: #16161d;
    }
    .assistant {
        border-left: tall #47c6b3;
        background: #0f1821;
    }
    .system {
        border-left: tall #7aa2f7;
        background: #101824;
    }
    .workflow {
        border-left: tall #ff9e64;
        background: #1b1712;
    }
    #composer_shell {
        layout: vertical;
        height: 12;
        padding: 1 2;
        background: #101620;
        border-top: heavy #273244;
    }
    #composer {
        height: 1fr;
        background: #0d131d;
        color: #eef4ff;
        border: round #2b3547;
    }
    #composer_actions {
        height: auto;
        margin-top: 1;
    }
    #composer_actions Button {
        width: 16;
        margin-left: 1;
    }
    #composer_spacer {
        width: 1fr;
    }
    """

    TITLE = "FastGPT Workbench"
    SUB_TITLE = "Claude-style Textual TUI"
    BINDINGS = [
        Binding("ctrl+j", "send_message", "Send"),
        Binding("ctrl+n", "new_chat", "New Chat"),
        Binding("ctrl+c", "quit", "Quit"),
    ]

    def __init__(self, chat_id: Optional[str] = None) -> None:
        """Create the app, minting a random chatId when none is supplied."""
        super().__init__()
        self.chat_id = chat_id or self._new_chat_id()
        # Monotonic counter used to mint unique message widget ids.
        self._message_counter = 0
        # True while a streaming turn is in flight; guards re-entry.
        self._busy = False

    def compose(self) -> ComposeResult:
        """Build the two-column shell: sidebar plus transcript/composer pane."""
        with Container(id="shell"):
            with Vertical(id="sidebar"):
                yield Static("FastGPT Workbench", id="brand")
                yield Static("", id="session_panel", classes="panel")
                yield Static("", id="status_panel", classes="panel")
                yield Static("Ctrl+J send\nCtrl+N new chat\nEsc closes modal prompts", classes="panel")
                yield RichLog(id="event_log", wrap=True, highlight=False, markup=False)
            with Vertical(id="main_panel"):
                yield Static("Claude-style FastGPT Console", id="chat_title")
                yield VerticalScroll(id="messages")
                with Vertical(id="composer_shell"):
                    yield TextArea("", id="composer")
                    with Horizontal(id="composer_actions"):
                        yield Static("", id="composer_spacer")
                        yield Button("New Chat", id="new_chat")
                        yield Button("Send", id="send", variant="primary")
        yield Footer()

    def on_mount(self) -> None:
        """Validate credentials, seed the sidebar, and show the intro message."""
        if not API_KEY or not BASE_URL:
            raise RuntimeError("Set API_KEY and BASE_URL in examples/.env before starting chat_tui.py")
        self._refresh_sidebar()
        self._set_status("Ready", "Fresh session")
        self._append_message(
            role="system",
            title="Session",
            content="Start typing below. FastGPT workflow events will appear in the left rail.",
        )
        self.query_one("#composer", TextArea).focus()

    def _new_chat_id(self) -> str:
        """Mint a random chat_tui_* chatId."""
        return f"chat_tui_{uuid.uuid4().hex[:12]}"

    def _refresh_sidebar(self) -> None:
        """Repaint the session panel with the current chatId and base URL."""
        session_panel = self.query_one("#session_panel", Static)
        base_url = BASE_URL or ""
        session_panel.update(
            f"Session\n\nchatId: {self.chat_id}\nbaseUrl: {base_url}"
        )

    def _set_status(self, heading: str, detail: str) -> None:
        """Repaint the status panel."""
        panel = self.query_one("#status_panel", Static)
        panel.update(f"Status\n\n{heading}\n{detail}")

    def _log_event(self, message: str) -> None:
        """Append a line to the sidebar event log."""
        self.query_one("#event_log", RichLog).write(message)

    def _format_workflow_payload(self, content: str) -> str:
        """Pretty-print JSON content for display; pass through non-JSON text."""
        try:
            return json.dumps(json.loads(content), ensure_ascii=False, indent=2)
        except Exception:
            return content

    def _append_message(self, role: str, title: str, content: str) -> str:
        """Mount a new MessageCard in the transcript and return its widget id."""
        self._message_counter += 1
        widget_id = f"message-{self._message_counter}"
        card = MessageCard(role=role, title=title, content=content, widget_id=widget_id)
        messages = self.query_one("#messages", VerticalScroll)
        messages.mount(card)
        messages.scroll_end(animate=False)
        return widget_id

    def _assistant_card(self) -> str:
        """Create the assistant placeholder card for the next streamed reply."""
        card_id = self._append_message("assistant", "FastGPT", "Thinking…")
        self.query_one(f"#{card_id}", MessageCard).set_text("Thinking…")
        return card_id

    def _start_turn(self, content: str, *, title: str = "You", role: str = "user") -> None:
        """Echo the outgoing message, then kick off a streaming worker turn."""
        if self._busy:
            self._log_event("[local] Busy streaming. Wait for the current turn to finish.")
            return
        # Workflow-input follow-ups get their JSON pretty-printed for display.
        display_content = self._format_workflow_payload(content) if role == "workflow" else content
        self._append_message(role=role, title=title, content=display_content)
        assistant_card_id = self._assistant_card()
        self._busy = True
        self._set_status("Streaming", "Receiving FastGPT output")
        self._stream_turn(
            messages=[{"role": "user", "content": content}],
            assistant_card_id=assistant_card_id,
        )

    def _complete_turn(self, assistant_card_id: str, *, waiting_interactive: bool) -> None:
        """Finalize the assistant card and status after a turn ends."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        if waiting_interactive and not card.content_text.strip():
            card.set_text("Waiting for workflow input…")
            self._set_status("Interactive", "Provide the requested workflow input")
        elif not card.content_text.strip():
            card.set_text("(no text response)")
            self._set_status("Ready", "Idle")
        else:
            self._set_status("Ready", "Idle")
        self._busy = False

    def _append_assistant_chunk(self, assistant_card_id: str, chunk: str) -> None:
        """Append a streamed chunk to the assistant card and keep it in view."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        card.append_text(chunk)
        self.query_one("#messages", VerticalScroll).scroll_end(animate=False)

    def _mark_turn_failed(self, assistant_card_id: str, message: str) -> None:
        """Surface a turn failure in the card, status panel, and event log."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        if card.content_text in {"", "Thinking…"}:
            card.set_text(f"Error: {message}")
        else:
            card.append_text(f"\n\nError: {message}")
        self._busy = False
        self._set_status("Error", message)
        self._log_event(f"[error] {message}")

    def _handle_interactive_result(self, result: Optional[str]) -> None:
        """Modal-dismiss callback: resume the workflow or log a local cancel."""
        if result is None:
            self._log_event("[interactive] Prompt cancelled locally.")
            self._set_status("Ready", "Interactive prompt dismissed")
            return
        self._start_turn(result, title="Workflow Input", role="workflow")

    def _present_interactive(self, event: FastGPTInteractiveEvent) -> None:
        """Push the matching modal for an interactive event."""
        self._log_event(f"[interactive] {event.interaction_type}")
        if event.interaction_type == "userInput":
            self.push_screen(InteractiveInputScreen(event), self._handle_interactive_result)
            return
        self.push_screen(InteractiveSelectScreen(event), self._handle_interactive_result)

    def action_send_message(self) -> None:
        """Send the composer content as a new user turn (Ctrl+J / Send)."""
        composer = self.query_one("#composer", TextArea)
        content = composer.text.strip()
        if not content:
            return
        composer.text = ""
        composer.focus()
        self._start_turn(content)

    def action_new_chat(self) -> None:
        """Reset to a fresh chatId and clear the transcript (Ctrl+N / New Chat)."""
        if self._busy:
            self._log_event("[local] Cannot reset chat while a turn is streaming.")
            return
        self.chat_id = self._new_chat_id()
        self.query_one("#messages", VerticalScroll).remove_children()
        self._refresh_sidebar()
        self._set_status("Ready", "Started a new random session")
        self._append_message(
            role="system",
            title="Session",
            content="New chat created. Start typing below.",
        )
        self._log_event(f"[local] Started new chatId {self.chat_id}")

    @on(Button.Pressed, "#send")
    def _send_button(self, _: Button.Pressed) -> None:
        self.action_send_message()

    @on(Button.Pressed, "#new_chat")
    def _new_chat_button(self, _: Button.Pressed) -> None:
        self.action_new_chat()

    @work(thread=True, exclusive=True)
    def _stream_turn(self, messages: List[Dict[str, Any]], assistant_card_id: str) -> None:
        """Worker thread: stream one FastGPT turn and relay events to the UI.

        Runs off the event loop; every widget mutation goes through
        call_from_thread. Stops early when an interactive node pauses the
        workflow, then hands the modal presentation back to the UI thread.
        """
        interactive_event: Optional[FastGPTInteractiveEvent] = None
        try:
            with ChatClient(api_key=API_KEY, base_url=BASE_URL) as client:
                response = client.create_chat_completion(
                    messages=messages,
                    stream=True,
                    detail=True,
                    chatId=self.chat_id,
                )
                response.raise_for_status()
                try:
                    for event in iter_stream_events(response):
                        # Incremental answer text.
                        if event.kind in {"data", "answer", "fastAnswer"}:
                            content = _extract_text_from_event(event.kind, event.data)
                            if content:
                                self.call_from_thread(self._append_assistant_chunk, assistant_card_id, content)
                            continue
                        # Workflow node status updates.
                        if event.kind == "flowNodeStatus":
                            if isinstance(event.data, dict):
                                status = str(event.data.get("status") or "?")
                                node_name = str(event.data.get("nodeName") or event.data.get("name") or event.data.get("node_id") or "Unknown node")
                                self.call_from_thread(self._log_event, f"[flow] {status}: {node_name}")
                            else:
                                self.call_from_thread(self._log_event, f"[flow] {event.data}")
                            continue
                        # Per-module response summaries (detail=True).
                        if event.kind == "flowResponses":
                            if isinstance(event.data, dict):
                                module_name = str(event.data.get("moduleName") or event.data.get("nodeName") or "Unknown module")
                                self.call_from_thread(self._log_event, f"[flow] response from: {module_name}")
                            elif isinstance(event.data, list):
                                self.call_from_thread(self._log_event, f"[flow] response details: {len(event.data)} module record(s)")
                            else:
                                self.call_from_thread(self._log_event, f"[flow] response details: {event.data}")
                            continue
                        # Tool lifecycle events.
                        if event.kind == "toolCall":
                            tool_name = _tool_name_from_event(event.data)
                            self.call_from_thread(self._log_event, f"[tool] calling: {tool_name}")
                            continue
                        if event.kind == "toolParams":
                            self.call_from_thread(self._log_event, f"[tool] params: {event.data}")
                            continue
                        if event.kind == "toolResponse":
                            self.call_from_thread(self._log_event, f"[tool] response: {event.data}")
                            continue
                        if event.kind == "updateVariables":
                            self.call_from_thread(self._log_event, f"[vars] updated: {event.data}")
                            continue
                        # Workflow paused for user input: stop and show a modal.
                        if event.kind == "interactive":
                            interactive_event = event
                            self.call_from_thread(self._present_interactive, event)
                            break
                        if event.kind == "error":
                            message = str(event.data.get("message") or event.data.get("error") or "Unknown FastGPT error")
                            raise RuntimeError(message)
                        if event.kind == "done":
                            break
                finally:
                    # Always release the HTTP connection.
                    response.close()
        except Exception as exc:
            self.call_from_thread(self._mark_turn_failed, assistant_card_id, str(exc))
            return
        self.call_from_thread(
            self._complete_turn,
            assistant_card_id,
            waiting_interactive=interactive_event is not None,
        )
def _parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Textual FastGPT chat workbench")
parser.add_argument(
"--chat-id",
dest="chat_id",
help="Reuse an existing FastGPT chatId. Defaults to a random chat_tui_* value.",
)
return parser.parse_args()
def main() -> None:
    """Entry point: parse CLI flags and run the Textual workbench app."""
    args = _parse_args()
    FastGPTWorkbench(chat_id=args.chat_id).run()


if __name__ == "__main__":
    main()