API emits LLM response event

This commit is contained in:
Xin Wang
2026-02-04 12:00:52 +08:00
parent 5aa9a12ca8
commit 7d255468ab
4 changed files with 63 additions and 9 deletions

View File

@@ -115,7 +115,13 @@ class WavFileClient:
"direction": direction,
"message": message
})
print(f"{direction} {message}")
# Handle encoding errors on Windows
try:
print(f"{direction} {message}")
except UnicodeEncodeError:
# Replace problematic characters for console output
safe_message = message.encode('ascii', errors='replace').decode('ascii')
print(f"{direction} {safe_message}")
async def connect(self) -> None:
"""Connect to WebSocket server."""
@@ -285,6 +291,14 @@ class WavFileClient:
elif event_type == "ttfb":
latency_ms = event.get("latencyMs", 0)
self.log_event("", f"[TTFB] Server latency: {latency_ms}ms")
elif event_type == "llmResponse":
text = event.get("text", "")
is_final = event.get("isFinal", False)
if is_final:
self.log_event("", f"LLM Response (final): {text[:100]}{'...' if len(text) > 100 else ''}")
elif self.verbose:
# Show streaming chunks only in verbose mode
self.log_event("", f"LLM: {text}")
elif event_type == "trackStart":
self.track_started = True
self.log_event("", "Bot started speaking")