"""Textual FastGPT workbench styled like a coding assistant terminal.
|
|
|
|
Run from the examples directory with .env configured:
|
|
python chat_tui.py
|
|
python chat_tui.py --chat-id my_existing_conversation
|
|
|
|
This example provides:
|
|
- a full-screen Textual interface
|
|
- streaming chat updates
|
|
- workflow / tool event logging
|
|
- modal handling for FastGPT interactive nodes
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
import json
|
|
import sys
|
|
import uuid
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional
|
|
|
|
from textual import on, work
|
|
from textual.app import App, ComposeResult
|
|
from textual.binding import Binding
|
|
from textual.containers import Container, Horizontal, Vertical, VerticalScroll
|
|
from textual.screen import ModalScreen
|
|
from textual.widgets import Button, Checkbox, Footer, Input, RichLog, Static, TextArea
|
|
|
|
# Make sibling example modules (chat_cli, fastgpt_client) importable no matter
# which working directory this script is launched from.
EXAMPLES_DIR = Path(__file__).resolve().parent
if str(EXAMPLES_DIR) not in sys.path:
    sys.path.insert(0, str(EXAMPLES_DIR))
|
|
|
|
from chat_cli import (
|
|
APP_ID,
|
|
API_KEY,
|
|
BASE_URL,
|
|
_extract_chat_init_opener,
|
|
_extract_text_from_event,
|
|
_interactive_prompt_text,
|
|
_normalize_option,
|
|
_tool_name_from_event,
|
|
)
|
|
from fastgpt_client import ChatClient, FastGPTInteractiveEvent, iter_stream_events
|
|
|
|
|
|
class MessageCard(Static):
    """Lightweight message block used in the transcript pane."""

    def __init__(self, role: str, title: str, content: str, widget_id: str) -> None:
        # An empty string would collapse the Static, so render a single space.
        super().__init__(content or " ", id=widget_id, classes=f"message {role}")
        self.role = role
        self.content_text = content
        self.border_title = title

    def set_text(self, content: str) -> None:
        """Replace the card's text wholesale and re-render."""
        self.content_text = content
        self.update(content or " ")

    def append_text(self, chunk: str) -> None:
        """Append a streamed chunk, discarding any placeholder text first."""
        is_placeholder = self.content_text in {"", "Thinking…"}
        self.content_text = chunk if is_placeholder else self.content_text + chunk
        self.update(self.content_text or " ")
|
|
|
|
|
|
class InteractiveInputScreen(ModalScreen[Optional[str]]):
    """Modal used for FastGPT userInput interactions.

    Dismisses with a JSON object string (structured form), a plain string
    (freeform fallback), or ``None`` when cancelled.
    """

    CSS = """
    InteractiveInputScreen {
        align: center middle;
        background: rgba(4, 7, 11, 0.82);
    }

    .dialog {
        width: 84;
        max-width: 110;
        background: #101620;
        border: round #47c6b3;
        padding: 1 2;
    }

    .dialog-title {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }

    .field-label {
        color: #c6d2df;
        margin-top: 1;
    }

    .dialog-actions {
        margin-top: 1;
        height: auto;
    }

    .dialog-actions Button {
        width: 1fr;
        margin-right: 1;
    }

    .validation {
        color: #ffb4a2;
        margin-top: 1;
    }
    """

    def __init__(self, event: FastGPTInteractiveEvent) -> None:
        super().__init__()
        self.event = event
        # Prompt shown as the dialog title; generic fallback when absent.
        self.prompt_text = _interactive_prompt_text(event.data, "Please provide the requested input")
        # assumes event.data may carry params.inputForm as a list of dicts — TODO confirm against FastGPT API
        raw_fields = event.data.get("params", {}).get("inputForm", [])
        self.fields: List[Dict[str, Any]] = []
        # Normalize every form entry into a predictable dict; skip non-dict noise.
        for index, raw_field in enumerate(raw_fields, start=1):
            if not isinstance(raw_field, dict):
                continue
            # Synthesize keys/labels so the modal still renders for sparse entries.
            key = str(raw_field.get("key") or raw_field.get("name") or f"field_{index}").strip() or f"field_{index}"
            label = str(raw_field.get("label") or raw_field.get("name") or key).strip() or key
            placeholder = str(raw_field.get("placeholder") or raw_field.get("description") or "").strip()
            default_value = raw_field.get("defaultValue", raw_field.get("default"))
            required = bool(raw_field.get("required"))
            self.fields.append(
                {
                    "key": key,
                    "label": label,
                    "placeholder": placeholder,
                    "default": "" if default_value in (None, "") else str(default_value),
                    "required": required,
                }
            )

    def compose(self) -> ComposeResult:
        """Build the dialog: title, one Input per field (or a freeform Input), actions."""
        with Container(classes="dialog"):
            yield Static(self.prompt_text, classes="dialog-title")
            if not self.fields:
                # No structured form supplied: fall back to a single freeform input.
                yield Static("FastGPT did not provide structured fields. Enter a single value below.", classes="field-label")
                yield Input(placeholder="Workflow input", id="input-freeform")
            else:
                for index, field in enumerate(self.fields, start=1):
                    suffix = "" if field["required"] else " [optional]"
                    yield Static(f"{index}. {field['label']}{suffix}", classes="field-label")
                    yield Input(
                        value=field["default"],
                        placeholder=field["placeholder"],
                        id=f"input-{index}",
                    )
            yield Static("", id="validation", classes="validation")
            with Horizontal(classes="dialog-actions"):
                yield Button("Cancel", id="cancel")
                yield Button("Submit", id="submit", variant="primary")

    def on_mount(self) -> None:
        """Focus the first input, or Submit when there is nothing to type."""
        inputs = list(self.query(Input))
        if inputs:
            inputs[0].focus()
        else:
            self.query_one("#submit", Button).focus()

    @on(Button.Pressed)
    def handle_button(self, event: Button.Pressed) -> None:
        """Handle Cancel/Submit; validate required fields before dismissing."""
        if event.button.id == "cancel":
            self.dismiss(None)
            return

        if not self.fields:
            # Freeform mode: an empty value is treated the same as cancelling.
            value = self.query_one("#input-freeform", Input).value.strip()
            self.dismiss(None if not value else value)
            return

        payload: Dict[str, Any] = {}
        for index, field in enumerate(self.fields, start=1):
            value = self.query_one(f"#input-{index}", Input).value.strip()
            if not value and field["required"]:
                # Block submission and surface which field is missing.
                self.query_one("#validation", Static).update(f"{field['label']} is required.")
                return
            payload[field["key"]] = value

        # Structured mode dismisses with a JSON object string.
        self.dismiss(json.dumps(payload, ensure_ascii=False))
|
|
|
|
|
|
class InteractiveSelectScreen(ModalScreen[Optional[str]]):
    """Modal used for FastGPT userSelect interactions.

    Dismisses with the chosen option value (single-select), a comma-joined
    string of values (multi-select), or ``None`` when cancelled.
    """

    CSS = """
    InteractiveSelectScreen {
        align: center middle;
        background: rgba(4, 7, 11, 0.82);
    }

    .dialog {
        width: 84;
        max-width: 110;
        background: #101620;
        border: round #f5d76e;
        padding: 1 2;
    }

    .dialog-title {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }

    .option-description {
        color: #8fa1b3;
        margin-left: 2;
        margin-bottom: 1;
    }

    .dialog-actions {
        margin-top: 1;
        height: auto;
    }

    .dialog-actions Button {
        width: 1fr;
        margin-right: 1;
    }

    .choice-button {
        width: 1fr;
        margin-top: 1;
    }

    .validation {
        color: #ffb4a2;
        margin-top: 1;
    }
    """

    def __init__(self, event: FastGPTInteractiveEvent) -> None:
        super().__init__()
        self.event = event
        payload = event.data
        # params may be absent or malformed; normalize to an empty dict.
        params = payload.get("params") if isinstance(payload.get("params"), dict) else {}
        self.prompt_text = _interactive_prompt_text(payload, "Please select an option")
        # The multi-select flag may live on params or on the top-level payload.
        self.multiple = bool(params.get("multiple") or payload.get("multiple"))
        raw_options = params.get("userSelectOptions") if isinstance(params.get("userSelectOptions"), list) else []
        # Keep only options that _normalize_option accepts (falsy results dropped).
        self.options = [
            item
            for index, raw_option in enumerate(raw_options, start=1)
            if (item := _normalize_option(raw_option, index))
        ]

    def compose(self) -> ComposeResult:
        """Build the dialog: title, options (checkboxes or buttons), actions."""
        with Container(classes="dialog"):
            yield Static(self.prompt_text, classes="dialog-title")
            if not self.options:
                yield Static("FastGPT did not provide selectable options.", classes="option-description")
            elif self.multiple:
                # Multi-select: one checkbox per option, confirmed via Submit.
                for index, option in enumerate(self.options, start=1):
                    label = f"{index}. {option['label']}"
                    if option["description"]:
                        label = f"{label} - {option['description']}"
                    yield Checkbox(label, id=f"check-{index}")
            else:
                # Single-select: each option is a button that dismisses immediately.
                for index, option in enumerate(self.options, start=1):
                    yield Button(f"{index}. {option['label']}", id=f"choice-{index}", classes="choice-button")
                    if option["description"]:
                        yield Static(option["description"], classes="option-description")
            yield Static("", id="validation", classes="validation")
            with Horizontal(classes="dialog-actions"):
                yield Button("Cancel", id="cancel")
                if self.multiple:
                    # Submit only exists in multi-select mode.
                    yield Button("Submit", id="submit", variant="primary")

    def on_mount(self) -> None:
        """Focus the first actionable button, or Cancel when none exist."""
        buttons = [button for button in self.query(Button) if button.id and button.id != "cancel"]
        if buttons:
            buttons[0].focus()
        else:
            self.query_one("#cancel", Button).focus()

    @on(Button.Pressed)
    def handle_button(self, event: Button.Pressed) -> None:
        """Route presses: cancel, a single choice, or a multi-select submit."""
        button_id = event.button.id or ""
        if button_id == "cancel":
            self.dismiss(None)
            return

        if button_id.startswith("choice-"):
            # Button ids are 1-based ("choice-1"); convert to a 0-based list index.
            index = int(button_id.split("-", 1)[1]) - 1
            self.dismiss(self.options[index]["value"])
            return

        if button_id == "submit":
            selected: List[str] = []
            for index, option in enumerate(self.options, start=1):
                if self.query_one(f"#check-{index}", Checkbox).value:
                    selected.append(option["value"])
            if not selected:
                # Require at least one checked option before dismissing.
                self.query_one("#validation", Static).update("Select at least one option.")
                return
            self.dismiss(", ".join(selected))
|
|
|
|
|
|
class FastGPTWorkbench(App[None]):
    """Claude Code style Textual workbench for FastGPT streaming chat.

    Layout: a sidebar (session info, status, key hints, event log) next to a
    main panel (transcript + composer). One streaming turn runs at a time in
    a worker thread; UI mutations are marshalled back via ``call_from_thread``.
    """

    CSS = """
    Screen {
        background: #0b0f14;
        color: #e6edf3;
    }

    #shell {
        layout: grid;
        grid-size: 2;
        grid-columns: 32 1fr;
        height: 1fr;
    }

    #sidebar {
        padding: 1 1 1 2;
        background: #101620;
        border-right: heavy #273244;
    }

    #brand {
        color: #f5d76e;
        text-style: bold;
        margin-bottom: 1;
    }

    .panel {
        background: #0d131d;
        border: round #2b3547;
        padding: 1;
        margin-bottom: 1;
    }

    #event_log {
        height: 1fr;
        background: #0d131d;
        border: round #2b3547;
    }

    #main_panel {
        layout: vertical;
    }

    #chat_title {
        padding: 1 2;
        background: #101620;
        border-bottom: heavy #273244;
        color: #f0f3f7;
        text-style: bold;
    }

    #messages {
        padding: 1 2;
        height: 1fr;
    }

    .message {
        width: 1fr;
        padding: 1 2;
        margin-bottom: 1;
        background: #111723;
        border: round #2b3547;
    }

    .user {
        border-left: tall #f5d76e;
        background: #16161d;
    }

    .assistant {
        border-left: tall #47c6b3;
        background: #0f1821;
    }

    .system {
        border-left: tall #7aa2f7;
        background: #101824;
    }

    .workflow {
        border-left: tall #ff9e64;
        background: #1b1712;
    }

    #composer_shell {
        layout: vertical;
        height: 12;
        padding: 1 2;
        background: #101620;
        border-top: heavy #273244;
    }

    #composer {
        height: 1fr;
        background: #0d131d;
        color: #eef4ff;
        border: round #2b3547;
    }

    #composer_actions {
        height: auto;
        margin-top: 1;
    }

    #composer_actions Button {
        width: 16;
        margin-left: 1;
    }

    #composer_spacer {
        width: 1fr;
    }
    """

    TITLE = "FastGPT Workbench"
    SUB_TITLE = "Claude-style Textual TUI"
    BINDINGS = [
        Binding("ctrl+j", "send_message", "Send"),
        Binding("ctrl+n", "new_chat", "New Chat"),
        Binding("ctrl+c", "quit", "Quit"),
    ]

    def __init__(self, chat_id: Optional[str] = None) -> None:
        super().__init__()
        # Reuse a caller-supplied chatId or mint a fresh random one.
        self.chat_id = chat_id or self._new_chat_id()
        # Monotonic counter used to generate unique message widget ids.
        self._message_counter = 0
        # True while a streaming turn is in flight; gates sends and resets.
        self._busy = False

    def compose(self) -> ComposeResult:
        """Assemble the two-column shell: sidebar on the left, chat on the right."""
        with Container(id="shell"):
            with Vertical(id="sidebar"):
                yield Static("FastGPT Workbench", id="brand")
                yield Static("", id="session_panel", classes="panel")
                yield Static("", id="status_panel", classes="panel")
                yield Static("Ctrl+J send\nCtrl+N new chat\nEsc closes modal prompts", classes="panel")
                yield RichLog(id="event_log", wrap=True, highlight=False, markup=False)
            with Vertical(id="main_panel"):
                yield Static("Claude-style FastGPT Console", id="chat_title")
                yield VerticalScroll(id="messages")
                with Vertical(id="composer_shell"):
                    yield TextArea("", id="composer")
                    with Horizontal(id="composer_actions"):
                        yield Static("", id="composer_spacer")
                        yield Button("New Chat", id="new_chat")
                        yield Button("Send", id="send", variant="primary")
        yield Footer()

    def on_mount(self) -> None:
        """Validate configuration, populate panels, and show the opener message.

        Raises:
            RuntimeError: when API_KEY or BASE_URL is missing from the environment.
        """
        if not API_KEY or not BASE_URL:
            raise RuntimeError("Set API_KEY and BASE_URL in examples/.env before starting chat_tui.py")
        self._refresh_sidebar()
        self._set_status("Ready", "Fresh session")
        initial_message = self._initial_session_message()
        self._append_message(
            role="system",
            title="Session",
            content=initial_message,
        )
        self.query_one("#composer", TextArea).focus()

    def _new_chat_id(self) -> str:
        """Return a random chatId with a recognizable chat_tui_ prefix."""
        return f"chat_tui_{uuid.uuid4().hex[:12]}"

    def _refresh_sidebar(self) -> None:
        """Rewrite the session panel with the current chatId/appId/baseUrl."""
        session_panel = self.query_one("#session_panel", Static)
        base_url = BASE_URL or ""
        app_id = APP_ID or "(not set)"
        session_panel.update(
            f"Session\n\nchatId: {self.chat_id}\nappId: {app_id}\nbaseUrl: {base_url}"
        )

    def _set_status(self, heading: str, detail: str) -> None:
        """Update the status panel with a heading line and a detail line."""
        panel = self.query_one("#status_panel", Static)
        panel.update(f"Status\n\n{heading}\n{detail}")

    def _log_event(self, message: str) -> None:
        """Append one line to the sidebar event log."""
        self.query_one("#event_log", RichLog).write(message)

    def _format_workflow_payload(self, content: str) -> str:
        """Pretty-print JSON content for display; return it untouched if not JSON."""
        try:
            return json.dumps(json.loads(content), ensure_ascii=False, indent=2)
        except Exception:
            return content

    def _default_session_message(self) -> str:
        """Fallback opener shown when no app-configured opener is available."""
        return "Start typing below. FastGPT workflow events will appear in the left rail."

    def _initial_session_message(self) -> str:
        """Fetch the app's configured opener, falling back to the default message.

        Any failure (network, auth, parsing) is logged and swallowed so the UI
        still starts.
        """
        if not APP_ID:
            return self._default_session_message()

        try:
            with ChatClient(api_key=API_KEY, base_url=BASE_URL) as client:
                response = client.get_chat_init(appId=APP_ID, chatId=self.chat_id)
                response.raise_for_status()
                opener = _extract_chat_init_opener(response.json())
        except Exception as exc:
            self._log_event(f"[init] Failed to load app opener: {exc}")
            return self._default_session_message()

        return opener or self._default_session_message()

    def _append_message(self, role: str, title: str, content: str) -> str:
        """Mount a new MessageCard in the transcript and return its widget id."""
        self._message_counter += 1
        widget_id = f"message-{self._message_counter}"
        card = MessageCard(role=role, title=title, content=content, widget_id=widget_id)
        messages = self.query_one("#messages", VerticalScroll)
        messages.mount(card)
        messages.scroll_end(animate=False)
        return widget_id

    def _assistant_card(self) -> str:
        """Create the assistant's placeholder card for the upcoming stream."""
        card_id = self._append_message("assistant", "FastGPT", "Thinking…")
        self.query_one(f"#{card_id}", MessageCard).set_text("Thinking…")
        return card_id

    def _start_turn(self, content: str, *, title: str = "You", role: str = "user") -> None:
        """Record the outgoing message and kick off the streaming worker.

        Refuses to start while a previous turn is still streaming.
        """
        if self._busy:
            self._log_event("[local] Busy streaming. Wait for the current turn to finish.")
            return

        # Workflow inputs are JSON payloads; pretty-print them for the transcript.
        display_content = self._format_workflow_payload(content) if role == "workflow" else content
        self._append_message(role=role, title=title, content=display_content)
        assistant_card_id = self._assistant_card()
        self._busy = True
        self._set_status("Streaming", "Receiving FastGPT output")
        self._stream_turn(
            messages=[{"role": "user", "content": content}],
            assistant_card_id=assistant_card_id,
        )

    def _complete_turn(self, assistant_card_id: str, *, waiting_interactive: bool) -> None:
        """Finalize the assistant card and status once the stream ends."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        if waiting_interactive and not card.content_text.strip():
            # Stream paused on an interactive node without emitting any text.
            card.set_text("Waiting for workflow input…")
            self._set_status("Interactive", "Provide the requested workflow input")
        elif not card.content_text.strip():
            card.set_text("(no text response)")
            self._set_status("Ready", "Idle")
        else:
            self._set_status("Ready", "Idle")
        self._busy = False

    def _append_assistant_chunk(self, assistant_card_id: str, chunk: str) -> None:
        """Append a streamed text chunk to the assistant card and keep it in view."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        card.append_text(chunk)
        self.query_one("#messages", VerticalScroll).scroll_end(animate=False)

    def _mark_turn_failed(self, assistant_card_id: str, message: str) -> None:
        """Surface a turn failure on the card, the status panel, and the log."""
        card = self.query_one(f"#{assistant_card_id}", MessageCard)
        if card.content_text in {"", "Thinking…"}:
            # Nothing streamed yet: replace the placeholder entirely.
            card.set_text(f"Error: {message}")
        else:
            card.append_text(f"\n\nError: {message}")
        self._busy = False
        self._set_status("Error", message)
        self._log_event(f"[error] {message}")

    def _handle_interactive_result(self, result: Optional[str]) -> None:
        """Modal dismissal callback: resume the workflow or record a cancel."""
        if result is None:
            self._log_event("[interactive] Prompt cancelled locally.")
            self._set_status("Ready", "Interactive prompt dismissed")
            return

        # Feed the user's answer back to FastGPT as the next turn.
        self._start_turn(result, title="Workflow Input", role="workflow")

    def _present_interactive(self, event: FastGPTInteractiveEvent) -> None:
        """Open the matching modal (input vs select) for an interactive node."""
        prompt_summary = _interactive_prompt_text(event.data, event.interaction_type).replace("\n", " / ")
        self._log_event(f"[interactive] {event.interaction_type}: {prompt_summary}")
        if event.interaction_type == "userInput":
            self.push_screen(InteractiveInputScreen(event), self._handle_interactive_result)
            return
        self.push_screen(InteractiveSelectScreen(event), self._handle_interactive_result)

    def action_send_message(self) -> None:
        """Send the composer contents as a user turn (Ctrl+J / Send button)."""
        composer = self.query_one("#composer", TextArea)
        content = composer.text.strip()
        if not content:
            return
        composer.text = ""
        composer.focus()
        self._start_turn(content)

    def action_new_chat(self) -> None:
        """Reset to a fresh chatId and transcript (Ctrl+N / New Chat button)."""
        if self._busy:
            self._log_event("[local] Cannot reset chat while a turn is streaming.")
            return
        self.chat_id = self._new_chat_id()
        self.query_one("#messages", VerticalScroll).remove_children()
        self._refresh_sidebar()
        self._set_status("Ready", "Started a new random session")
        initial_message = self._initial_session_message()
        self._append_message(
            role="system",
            title="Session",
            content=initial_message,
        )
        self._log_event(f"[local] Started new chatId {self.chat_id}")

    @on(Button.Pressed, "#send")
    def _send_button(self, _: Button.Pressed) -> None:
        self.action_send_message()

    @on(Button.Pressed, "#new_chat")
    def _new_chat_button(self, _: Button.Pressed) -> None:
        self.action_new_chat()

    @work(thread=True, exclusive=True)
    def _stream_turn(self, messages: List[Dict[str, Any]], assistant_card_id: str) -> None:
        """Worker thread: stream one completion and dispatch events to the UI.

        Runs off the event loop (thread=True), so every UI mutation goes
        through ``call_from_thread``. Stops early on an interactive event so
        the modal can collect input; any exception marks the turn failed.
        """
        interactive_event: Optional[FastGPTInteractiveEvent] = None
        try:
            with ChatClient(api_key=API_KEY, base_url=BASE_URL) as client:
                response = client.create_chat_completion(
                    messages=messages,
                    stream=True,
                    detail=True,
                    chatId=self.chat_id,
                )
                response.raise_for_status()
                try:
                    for event in iter_stream_events(response):
                        # Text-bearing events feed the assistant card.
                        if event.kind in {"data", "answer", "fastAnswer"}:
                            content = _extract_text_from_event(event.kind, event.data)
                            if content:
                                self.call_from_thread(self._append_assistant_chunk, assistant_card_id, content)
                            continue

                        # Node status updates go to the sidebar log.
                        if event.kind == "flowNodeStatus":
                            if isinstance(event.data, dict):
                                status = str(event.data.get("status") or "?")
                                node_name = str(event.data.get("nodeName") or event.data.get("name") or event.data.get("node_id") or "Unknown node")
                                self.call_from_thread(self._log_event, f"[flow] {status}: {node_name}")
                            else:
                                self.call_from_thread(self._log_event, f"[flow] {event.data}")
                            continue

                        if event.kind == "flowResponses":
                            if isinstance(event.data, dict):
                                module_name = str(event.data.get("moduleName") or event.data.get("nodeName") or "Unknown module")
                                self.call_from_thread(self._log_event, f"[flow] response from: {module_name}")
                            elif isinstance(event.data, list):
                                self.call_from_thread(self._log_event, f"[flow] response details: {len(event.data)} module record(s)")
                            else:
                                self.call_from_thread(self._log_event, f"[flow] response details: {event.data}")
                            continue

                        if event.kind == "toolCall":
                            tool_name = _tool_name_from_event(event.data)
                            self.call_from_thread(self._log_event, f"[tool] calling: {tool_name}")
                            continue

                        if event.kind == "toolParams":
                            self.call_from_thread(self._log_event, f"[tool] params: {event.data}")
                            continue

                        if event.kind == "toolResponse":
                            self.call_from_thread(self._log_event, f"[tool] response: {event.data}")
                            continue

                        if event.kind == "updateVariables":
                            self.call_from_thread(self._log_event, f"[vars] updated: {event.data}")
                            continue

                        # Interactive node: hand off to a modal and stop streaming.
                        if event.kind == "interactive":
                            interactive_event = event
                            self.call_from_thread(self._present_interactive, event)
                            break

                        if event.kind == "error":
                            # NOTE(review): assumes event.data is a dict here; a
                            # non-dict payload would raise AttributeError, which
                            # the outer except still reports — confirm upstream shape.
                            message = str(event.data.get("message") or event.data.get("error") or "Unknown FastGPT error")
                            raise RuntimeError(message)

                        if event.kind == "done":
                            break
                finally:
                    # Always release the streaming HTTP response.
                    response.close()
        except Exception as exc:
            self.call_from_thread(self._mark_turn_failed, assistant_card_id, str(exc))
            return

        self.call_from_thread(
            self._complete_turn,
            assistant_card_id,
            waiting_interactive=interactive_event is not None,
        )
|
|
|
|
|
|
def _parse_args() -> argparse.Namespace:
|
|
parser = argparse.ArgumentParser(description="Textual FastGPT chat workbench")
|
|
parser.add_argument(
|
|
"--chat-id",
|
|
dest="chat_id",
|
|
help="Reuse an existing FastGPT chatId. Defaults to a random chat_tui_* value.",
|
|
)
|
|
return parser.parse_args()
|
|
|
|
|
|
def main() -> None:
    """CLI entry point: parse options and launch the Textual workbench."""
    options = _parse_args()
    app = FastGPTWorkbench(chat_id=options.chat_id)
    app.run()
|
|
|
|
|
|
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|