From a3ced1f246c85c184a5a11aaaf9e891d8e9120c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?= Date: Mon, 17 Nov 2025 18:23:12 +0100 Subject: [PATCH] fix: Send Completed event when stream ends without finish_reason MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add debug logging for finish_reason to diagnose stream completion issues - When SSE stream ends without sending a finish_reason chunk, emit final items and Completed event - This fixes the "working" hang when the API stream closes without proper completion signal - Fixed duplicate max_tokens fields in test provider definitions 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com> --- llmx-rs/core/src/chat_completions.rs | 17 +++++++++++++++++ llmx-rs/core/src/model_provider_info.rs | 3 --- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/llmx-rs/core/src/chat_completions.rs b/llmx-rs/core/src/chat_completions.rs index 4f24e3e3..14d81294 100644 --- a/llmx-rs/core/src/chat_completions.rs +++ b/llmx-rs/core/src/chat_completions.rs @@ -841,6 +841,7 @@ async fn process_chat_sse( // Emit end-of-turn when finish_reason signals completion. 
if let Some(finish_reason) = choice.get("finish_reason").and_then(|v| v.as_str()) { + debug!("Received finish_reason: {}", finish_reason); match finish_reason { "tool_calls" if fn_call_state.active => { // First, flush the terminal raw reasoning so UIs can finalize @@ -888,6 +889,22 @@ async fn process_chat_sse( } } } + + // Stream ended without finish_reason - this can happen when the stream closes abruptly + debug!("Stream ended without finish_reason, emitting final items and Completed event"); + if let Some(item) = assistant_item.take() { + let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await; + } + if let Some(item) = reasoning_item.take() { + let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await; + } + // Send Completed event so llmx knows the turn is done + let _ = tx_event + .send(Ok(ResponseEvent::Completed { + response_id: String::new(), + token_usage: token_usage.clone(), + })) + .await; } /// Optional client-side aggregation helper diff --git a/llmx-rs/core/src/model_provider_info.rs b/llmx-rs/core/src/model_provider_info.rs index 58da2580..4eaa3794 100644 --- a/llmx-rs/core/src/model_provider_info.rs +++ b/llmx-rs/core/src/model_provider_info.rs @@ -295,7 +295,6 @@ pub fn built_in_model_providers() -> HashMap { stream_max_retries: None, stream_idle_timeout_ms: None, max_tokens: None, - max_tokens: None, requires_openai_auth: false, }, ), @@ -337,7 +336,6 @@ pub fn built_in_model_providers() -> HashMap { stream_max_retries: None, stream_idle_timeout_ms: None, max_tokens: None, - max_tokens: None, requires_openai_auth: true, }, ), @@ -384,7 +382,6 @@ pub fn create_oss_provider_with_base_url(base_url: &str) -> ModelProviderInfo { stream_max_retries: None, stream_idle_timeout_ms: None, max_tokens: None, - max_tokens: None, requires_openai_auth: false, } }