Enhance DuplexPipeline to track assistant activity and improve presence probe logic. Introduce a method to update assistant activity timestamp and modify presence probe conditions to consider both user and assistant activity. Update tests to validate new behavior and ensure contextual prompts are generated correctly.
This commit is contained in:
@@ -62,6 +62,12 @@ class _FakeLLM:
|
||||
self._rounds = rounds
|
||||
self._call_index = 0
|
||||
|
||||
async def generate(self, messages, temperature=0.7, max_tokens=None):
    """Return a canned presence-probe reply for the fake LLM.

    Joins the textual ``content`` of every message (missing attributes
    become the empty string) and, when the order-number keyword
    ("订单号") appears anywhere in the combined text, answers with the
    order-specific probe; otherwise falls back to the generic probe.
    ``temperature`` and ``max_tokens`` are accepted only to mirror the
    real LLM interface and are ignored.
    """
    fragments = (str(getattr(msg, "content", "")) for msg in messages)
    combined = " ".join(fragments)
    mentions_order_number = "订单号" in combined
    return "关于订单号这块,你还在线吗?" if mentions_order_number else "你还在线吗?"
|
||||
|
||||
async def generate_stream(self, _messages, temperature=0.7, max_tokens=None):
|
||||
idx = self._call_index
|
||||
self._call_index += 1
|
||||
@@ -384,7 +390,9 @@ async def test_presence_probe_emits_contextual_direct_prompt(monkeypatch):
|
||||
)
|
||||
await pipeline._shutdown_presence_probe_task()
|
||||
await pipeline.conversation.add_assistant_turn("请把你的订单号告诉我,我继续帮你处理。")
|
||||
pipeline._last_user_activity_ms = (time.monotonic() * 1000.0) - 8000.0
|
||||
now_ms = time.monotonic() * 1000.0
|
||||
pipeline._last_user_activity_ms = now_ms - 8000.0
|
||||
pipeline._last_assistant_activity_ms = now_ms - 8000.0
|
||||
|
||||
fired = await pipeline._run_presence_probe_once()
|
||||
|
||||
@@ -392,6 +400,7 @@ async def test_presence_probe_emits_contextual_direct_prompt(monkeypatch):
|
||||
probe_text_events = [e for e in events if e.get("type") == "assistant.response.final"]
|
||||
assert probe_text_events
|
||||
assert "订单号" in str(probe_text_events[-1].get("text") or "")
|
||||
assert "订单号" in str(pipeline.conversation.last_assistant_text or "")
|
||||
assert any(e.get("type") == "output.audio.start" for e in events)
|
||||
assert not any(e.get("type") == "assistant.tool_call" for e in events)
|
||||
|
||||
@@ -412,7 +421,9 @@ async def test_presence_probe_respects_max_prompts_limit(monkeypatch):
|
||||
)
|
||||
await pipeline._shutdown_presence_probe_task()
|
||||
await pipeline.conversation.add_assistant_turn("我们继续。")
|
||||
pipeline._last_user_activity_ms = (time.monotonic() * 1000.0) - 8000.0
|
||||
now_ms = time.monotonic() * 1000.0
|
||||
pipeline._last_user_activity_ms = now_ms - 8000.0
|
||||
pipeline._last_assistant_activity_ms = now_ms - 8000.0
|
||||
|
||||
first_fired = await pipeline._run_presence_probe_once()
|
||||
second_fired = await pipeline._run_presence_probe_once(
|
||||
@@ -441,7 +452,9 @@ async def test_presence_probe_text_mode_emits_text_only(monkeypatch):
|
||||
)
|
||||
await pipeline._shutdown_presence_probe_task()
|
||||
await pipeline.conversation.add_assistant_turn("我们继续。")
|
||||
pipeline._last_user_activity_ms = (time.monotonic() * 1000.0) - 8000.0
|
||||
now_ms = time.monotonic() * 1000.0
|
||||
pipeline._last_user_activity_ms = now_ms - 8000.0
|
||||
pipeline._last_assistant_activity_ms = now_ms - 8000.0
|
||||
|
||||
fired = await pipeline._run_presence_probe_once()
|
||||
|
||||
@@ -451,6 +464,28 @@ async def test_presence_probe_text_mode_emits_text_only(monkeypatch):
|
||||
assert not any(e.get("type") == "output.audio.start" for e in events)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_presence_probe_does_not_count_assistant_speaking_time_as_idle(monkeypatch):
    """Recent assistant speech must keep the presence probe from firing.

    The user has been silent for 30s — well past the 10s idle threshold —
    but the assistant was active only 2s ago, so the probe must not be
    considered due yet.
    """
    probe_overrides = {
        "presenceProbe": {
            "enabled": True,
            "idleSeconds": 10,
            "cooldownSeconds": 5,
            "maxPrompts": 1,
        }
    }

    pipeline, _events = _build_pipeline(monkeypatch, [[LLMStreamEvent(type="done")]])
    pipeline.apply_runtime_overrides(probe_overrides)
    # Stop the background probe task so timing is controlled entirely here.
    await pipeline._shutdown_presence_probe_task()
    await pipeline.conversation.add_assistant_turn("我们继续。")

    reference_ms = time.monotonic() * 1000.0
    # User idle far beyond the threshold; assistant active just now.
    pipeline._last_user_activity_ms = reference_ms - 30_000.0
    pipeline._last_assistant_activity_ms = reference_ms - 2_000.0

    assert pipeline._presence_probe_due(reference_ms) is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_server_calculator_emits_tool_result(monkeypatch):
|
||||
pipeline, events = _build_pipeline(
|
||||
|
||||
Reference in New Issue
Block a user