fix: Send Completed event when stream ends without finish_reason
Some checks failed
ci / build-test (push) Failing after 4m51s
Codespell / Check for spelling errors (push) Successful in 4s
sdk / sdks (push) Successful in 11m7s
rust-ci / Detect changed areas (push) Has been cancelled
rust-ci / Format / etc (push) Has been cancelled
rust-ci / cargo shear (push) Has been cancelled
rust-ci / Lint/Build — macos-14 - aarch64-apple-darwin (push) Has been cancelled
rust-ci / Lint/Build — macos-14 - x86_64-apple-darwin (push) Has been cancelled
rust-ci / Lint/Build — ubuntu-24.04 - x86_64-unknown-linux-gnu (push) Has been cancelled
rust-ci / Lint/Build — ubuntu-24.04 - x86_64-unknown-linux-musl (push) Has been cancelled
rust-ci / Lint/Build — ubuntu-24.04-arm - aarch64-unknown-linux-gnu (push) Has been cancelled
rust-ci / Lint/Build — ubuntu-24.04-arm - aarch64-unknown-linux-musl (push) Has been cancelled
rust-ci / Lint/Build — windows-11-arm - aarch64-pc-windows-msvc (push) Has been cancelled
rust-ci / Lint/Build — windows-latest - x86_64-pc-windows-msvc (push) Has been cancelled
rust-ci / Lint/Build — macos-14 - aarch64-apple-darwin (release) (push) Has been cancelled
rust-ci / Lint/Build — ubuntu-24.04 - x86_64-unknown-linux-musl (release) (push) Has been cancelled
rust-ci / Lint/Build — windows-11-arm - aarch64-pc-windows-msvc (release) (push) Has been cancelled
rust-ci / Lint/Build — windows-latest - x86_64-pc-windows-msvc (release) (push) Has been cancelled
rust-ci / Tests — macos-14 - aarch64-apple-darwin (push) Has been cancelled
rust-ci / Tests — ubuntu-24.04 - x86_64-unknown-linux-gnu (push) Has been cancelled
rust-ci / Tests — ubuntu-24.04-arm - aarch64-unknown-linux-gnu (push) Has been cancelled
rust-ci / Tests — windows-11-arm - aarch64-pc-windows-msvc (push) Has been cancelled
rust-ci / Tests — windows-latest - x86_64-pc-windows-msvc (push) Has been cancelled
rust-ci / CI results (required) (push) Has been cancelled

- Add debug logging for finish_reason to diagnose stream completion issues
- When SSE stream ends without sending a finish_reason chunk, emit final items and Completed event
- This fixes the indefinite "working" spinner hang that occurred when the API stream closed without sending a proper completion signal
- Fixed duplicate max_tokens fields in test provider definitions

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2025-11-17 18:23:12 +01:00
parent 401b0b3935
commit a3ced1f246
2 changed files with 17 additions and 3 deletions

View File

@@ -841,6 +841,7 @@ async fn process_chat_sse<S>(
// Emit end-of-turn when finish_reason signals completion.
if let Some(finish_reason) = choice.get("finish_reason").and_then(|v| v.as_str()) {
debug!("Received finish_reason: {}", finish_reason);
match finish_reason {
"tool_calls" if fn_call_state.active => {
// First, flush the terminal raw reasoning so UIs can finalize
@@ -888,6 +889,22 @@ async fn process_chat_sse<S>(
}
}
}
// Stream ended without finish_reason - this can happen when the stream closes abruptly
debug!("Stream ended without finish_reason, emitting final items and Completed event");
if let Some(item) = assistant_item.take() {
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
if let Some(item) = reasoning_item.take() {
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
// Send Completed event so llmx knows the turn is done
let _ = tx_event
.send(Ok(ResponseEvent::Completed {
response_id: String::new(),
token_usage: token_usage.clone(),
}))
.await;
}
/// Optional client-side aggregation helper

View File

@@ -295,7 +295,6 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: false,
},
),
@@ -337,7 +336,6 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: true,
},
),
@@ -384,7 +382,6 @@ pub fn create_oss_provider_with_base_url(base_url: &str) -> ModelProviderInfo {
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: false,
}
}