fix: introduce create_tools_json() and share it with chat_completions.rs (#1177)

The main motivator behind this PR is that `stream_chat_completions()`
was not adding the `"tools"` entry to the payload posted to the
`/chat/completions` endpoint. This PR (1) moves the existing logic for
building the `"tools"` JSON out of `client.rs` and into `openai_tools.rs`,
and (2) updates both the Responses API path (`client.rs`) and the Chat
Completions API path (`chat_completions.rs`) to use it.
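
To make the split concrete, here is a minimal sketch of why the two APIs need
different `"tools"` shapes and what a shared definition plus a Chat Completions
wrapper could look like. The `ToolSpec` type and both helper bodies are
illustrative assumptions, not code from this commit; only
`create_tools_json_for_chat_completions_api` and its role come from the diff
below.

```rust
// Illustrative sketch only: `ToolSpec` and these helpers are assumptions,
// not the actual contents of openai_tools.rs.
use serde_json::{json, Value};

/// A stand-in for however the crate describes a tool internally.
struct ToolSpec {
    name: &'static str,
    description: &'static str,
    parameters: Value, // JSON Schema for the tool's arguments
}

/// Responses API: tool definitions are flat objects in the "tools" array.
fn tool_json_for_responses_api(tool: &ToolSpec) -> Value {
    json!({
        "type": "function",
        "name": tool.name,
        "description": tool.description,
        "parameters": tool.parameters.clone(),
    })
}

/// Chat Completions API: the same definition is nested under a "function" key,
/// which is why a dedicated chat-completions variant of the helper is needed.
fn tool_json_for_chat_completions_api(tool: &ToolSpec) -> Value {
    json!({
        "type": "function",
        "function": {
            "name": tool.name,
            "description": tool.description,
            "parameters": tool.parameters.clone(),
        },
    })
}
```

Sharing one flat definition and wrapping it per endpoint is one way such a
helper could avoid duplicating the tool list across `client.rs` and
`chat_completions.rs`.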

Note that this PR alone is not sufficient to get tool calling working over the
Chat Completions API: that is done in
https://github.com/openai/codex/pull/1167.

---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/1177).
* #1167
* __->__ #1177
commit 1bf82056b3 (parent e207f20f64)
Author: Michael Bolin
Date: 2025-05-30 14:07:03 -07:00
Committed by: GitHub
4 changed files with 169 additions and 116 deletions


@@ -25,6 +25,7 @@ use crate::flags::OPENAI_REQUEST_MAX_RETRIES;
 use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS;
 use crate::models::ContentItem;
 use crate::models::ResponseItem;
+use crate::openai_tools::create_tools_json_for_chat_completions_api;
 use crate::util::backoff;
 
 /// Implementation for the classic Chat Completions API. This is intentionally
@@ -56,17 +57,22 @@ pub(crate) async fn stream_chat_completions(
         }
     }
 
+    let tools_json = create_tools_json_for_chat_completions_api(prompt, model)?;
     let payload = json!({
         "model": model,
         "messages": messages,
-        "stream": true
+        "stream": true,
+        "tools": tools_json,
     });
 
     let base_url = provider.base_url.trim_end_matches('/');
     let url = format!("{}/chat/completions", base_url);
     debug!(url, "POST (chat)");
-    trace!("request payload: {}", payload);
+    trace!(
+        "request payload: {}",
+        serde_json::to_string_pretty(&payload).unwrap_or_default()
+    );
 
     let api_key = provider.api_key()?;
     let mut attempt = 0;
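
With this change, the body posted to `/chat/completions` carries the tool
definitions alongside the messages. A rough, self-contained illustration of the
resulting payload shape, using a hypothetical `shell` tool and placeholder
model/message values (none of which come from this diff):

```rust
// Sketch of the request body shape after this change; the tool, model, and
// message below are placeholders, not values produced by the real code.
use serde_json::json;

fn main() {
    let tools_json = vec![json!({
        "type": "function",
        "function": {
            "name": "shell",
            "description": "Run a shell command",
            "parameters": {
                "type": "object",
                "properties": { "command": { "type": "string" } },
                "required": ["command"]
            }
        }
    })];

    let payload = json!({
        "model": "gpt-4o",
        "messages": [{ "role": "user", "content": "list the files in this repo" }],
        "stream": true,
        "tools": tools_json,
    });

    // Mirrors the pretty-printed trace logging added in the diff above.
    println!("{}", serde_json::to_string_pretty(&payload).unwrap_or_default());
}
```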