fix: get responses API working again in Rust (#872)

I inadvertently regressed support for the Responses API when adding
support for the chat completions API in
https://github.com/openai/codex/pull/862. This should get both APIs
working again, but the chat completions codepath seems more complex than
necessary. I'll try to clean that up shortly, but I want to get things
working again ASAP.
This commit is contained in:
Michael Bolin
2025-05-08 22:49:15 -07:00
committed by GitHub
parent e924070cee
commit b940adae8e
2 changed files with 27 additions and 3 deletions

View File

@@ -32,7 +32,6 @@ use tracing::trace;
use tracing::warn;
use crate::WireApi;
-use crate::chat_completions::AggregateStreamExt;
use crate::client::ModelClient;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
@@ -864,7 +863,7 @@ async fn try_run_turn(
sub_id: &str,
prompt: &Prompt,
) -> CodexResult<Vec<ProcessedResponseItem>> {
-    let mut stream = sess.client.clone().stream(prompt).await?.aggregate();
+    let mut stream = sess.client.clone().stream(prompt).await?;
// Buffer all the incoming messages from the stream first, then execute them.
// If we execute a function call in the middle of handling the stream, it can time out.