2 Commits

Author SHA1 Message Date
a3ced1f246 fix: Send Completed event when stream ends without finish_reason
Some checks failed: ci / build-test (push) failing after 4m51s; Codespell / Check for spelling errors (push) successful in 4s; sdk / sdks (push) successful in 11m7s; all rust-ci jobs (push) cancelled.
- Add debug logging for finish_reason to diagnose stream completion issues
- When the SSE stream ends without sending a finish_reason chunk, emit the final items and a Completed event
- This fixes the "working" hang when the API stream closes without a proper completion signal
- Fix duplicate max_tokens fields in test provider definitions

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-17 18:23:12 +01:00
401b0b3935 fix: Handle empty assistant messages as turn completion
Some checks failed: ci / build-test (push) failing after 4m50s; Codespell / Check for spelling errors (push) successful in 4s; all rust-ci jobs and sdk / sdks (push) cancelled.
- When the API returns an empty assistant message (content: []), treat it as a turn completion signal
- This fixes the "working" hang that occurs after tool calls when the API stream ends with an empty message
- Update parse_agent_message to return None for empty content
- Fixes an issue where llmx would hang indefinitely waiting for content that never arrives

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-17 18:15:24 +01:00
3 changed files with 36 additions and 9 deletions

View File

@@ -841,6 +841,7 @@ async fn process_chat_sse<S>(
// Emit end-of-turn when finish_reason signals completion.
if let Some(finish_reason) = choice.get("finish_reason").and_then(|v| v.as_str()) {
debug!("Received finish_reason: {}", finish_reason);
match finish_reason {
"tool_calls" if fn_call_state.active => {
// First, flush the terminal raw reasoning so UIs can finalize
@@ -888,6 +889,22 @@ async fn process_chat_sse<S>(
}
}
}
// Stream ended without finish_reason - this can happen when the stream closes abruptly
debug!("Stream ended without finish_reason, emitting final items and Completed event");
if let Some(item) = assistant_item.take() {
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
if let Some(item) = reasoning_item.take() {
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
// Send Completed event so llmx knows the turn is done
let _ = tx_event
.send(Ok(ResponseEvent::Completed {
response_id: String::new(),
token_usage: token_usage.clone(),
}))
.await;
}
/// Optional client-side aggregation helper
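
For context, here is a standalone sketch of the fallback added above. ResponseEvent, TokenUsage, and finish_turn are simplified, hypothetical stand-ins rather than the crate's real definitions, and it assumes a tokio mpsc channel like the one process_chat_sse writes to:

// Hypothetical, minimal stand-ins; the real ResponseEvent/TokenUsage are richer.
use tokio::sync::mpsc;

#[derive(Debug, Clone)]
struct TokenUsage;

#[derive(Debug)]
enum ResponseEvent {
    OutputItemDone(String),
    Completed {
        response_id: String,
        token_usage: Option<TokenUsage>,
    },
}

// If the SSE loop exits without ever seeing a finish_reason, flush any
// buffered item and emit a synthetic Completed event so the consumer
// stops showing "working" and ends the turn.
async fn finish_turn(
    tx_event: &mpsc::Sender<ResponseEvent>,
    mut assistant_item: Option<String>,
    saw_finish_reason: bool,
    token_usage: Option<TokenUsage>,
) {
    if saw_finish_reason {
        return; // the normal completion path already emitted Completed
    }
    if let Some(item) = assistant_item.take() {
        let _ = tx_event.send(ResponseEvent::OutputItemDone(item)).await;
    }
    let _ = tx_event
        .send(ResponseEvent::Completed {
            response_id: String::new(),
            token_usage,
        })
        .await;
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel(8);
    // Simulate a stream that closed abruptly with a partial assistant item buffered.
    finish_turn(&tx, Some("partial answer".to_string()), false, None).await;
    drop(tx);
    while let Some(event) = rx.recv().await {
        println!("{event:?}");
    }
}

Running the sketch prints the OutputItemDone event followed by the synthetic Completed event, which is the signal that prevents the hang described in commit a3ced1f246.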

View File

@@ -54,7 +54,7 @@ fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
Some(UserMessageItem::new(&content))
}
fn parse_agent_message(id: Option<&String>, message: &[ContentItem]) -> AgentMessageItem {
fn parse_agent_message(id: Option<&String>, message: &[ContentItem]) -> Option<AgentMessageItem> {
let mut content: Vec<AgentMessageContent> = Vec::new();
for content_item in message.iter() {
match content_item {
@@ -69,18 +69,23 @@ fn parse_agent_message(id: Option<&String>, message: &[ContentItem]) -> AgentMes
}
}
}
// If the message has no content, return None to signal turn completion
// This happens when the API ends a turn with an empty assistant message (e.g., after tool calls)
if content.is_empty() {
return None;
}
let id = id.cloned().unwrap_or_else(|| Uuid::new_v4().to_string());
AgentMessageItem { id, content }
Some(AgentMessageItem { id, content })
}
pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
match item {
ResponseItem::Message { role, content, id } => match role.as_str() {
"user" => parse_user_message(content).map(TurnItem::UserMessage),
"assistant" => Some(TurnItem::AgentMessage(parse_agent_message(
id.as_ref(),
content,
))),
"assistant" => parse_agent_message(id.as_ref(), content)
.map(TurnItem::AgentMessage),
"system" => None,
_ => None,
},
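
A self-contained sketch of the empty-message rule from commit 401b0b3935, using simplified stand-ins for ContentItem and AgentMessageItem (the real types carry more variants and a UUID-based id):

// Simplified stand-ins; the real ContentItem has more variants and the real
// id is a freshly generated UUID rather than a fixed string.
#[derive(Debug, Clone)]
enum ContentItem {
    OutputText { text: String },
}

#[derive(Debug)]
struct AgentMessageItem {
    id: String,
    content: Vec<String>,
}

// Returns None when the assistant message has no content; the caller treats
// that as the turn being complete instead of waiting for output that never arrives.
fn parse_agent_message(id: Option<&String>, message: &[ContentItem]) -> Option<AgentMessageItem> {
    let mut content: Vec<String> = Vec::new();
    for content_item in message {
        match content_item {
            ContentItem::OutputText { text } => content.push(text.clone()),
        }
    }
    if content.is_empty() {
        return None;
    }
    let id = id.cloned().unwrap_or_else(|| "generated-id".to_string());
    Some(AgentMessageItem { id, content })
}

fn main() {
    // Empty assistant message: no item is produced, signalling the end of the turn.
    assert!(parse_agent_message(None, &[]).is_none());

    // A non-empty message still produces an item as before.
    let message = [ContentItem::OutputText { text: "done".to_string() }];
    let item = parse_agent_message(None, &message).expect("non-empty message");
    println!("parsed {item:?}");
}

The loop mirrors the shape of the real parser; the only new behaviour is that an empty content vector maps to None, which parse_turn_item then propagates so the turn ends cleanly.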

View File

@@ -294,7 +294,7 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: false,
},
),
@@ -335,7 +335,7 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: true,
},
),
@@ -381,7 +381,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str) -> ModelProviderInfo {
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
max_tokens: None,
requires_openai_auth: false,
}
}
@@ -422,6 +422,7 @@ base_url = "http://localhost:11434/v1"
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
requires_openai_auth: false,
};
@@ -452,6 +453,7 @@ query_params = { api-version = "2025-04-01-preview" }
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
requires_openai_auth: false,
};
@@ -485,6 +487,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
requires_openai_auth: false,
};
@@ -508,6 +511,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
requires_openai_auth: false,
}
}
@@ -541,6 +545,7 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
max_tokens: None,
requires_openai_auth: false,
};
assert!(named_provider.is_azure_responses_endpoint());
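
The remaining hunks simply keep the test provider fixtures in sync with the struct definition: a Rust struct literal must name every field exactly once, so each ModelProviderInfo fixture needs a single max_tokens entry (a missing field fails to compile with E0063, a duplicate with E0062). A toy illustration with a hypothetical stand-in struct:

// Stand-in struct; the real ModelProviderInfo has many more fields.
#[derive(Debug)]
struct ProviderFixture {
    name: String,
    max_tokens: Option<u64>,
    requires_openai_auth: bool,
}

fn main() {
    // Every field appears exactly once; omitting max_tokens would be a
    // compile error (E0063), and listing it twice would be E0062.
    let fixture = ProviderFixture {
        name: "oss".to_string(),
        max_tokens: None,
        requires_openai_auth: false,
    };
    println!("{fixture:?}");
}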