[apply_patch] freeform apply_patch tool (#2576)
## Summary

GPT-5 introduced the concept of [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools), which let the model send a raw string result back, avoiding JSON-escaping issues. We are migrating gpt-5 to use this by default. However, gpt-oss models do not support custom tools, only normal function tools. So we keep both tool definitions and provide whichever one the model family supports (a rough sketch of that selection follows the testing notes below).

## Testing

- [x] Tested locally with various models
- [x] Unit tests pass
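A minimal sketch of the selection described above, for illustration only: the `ToolSpec` enum, the `supports_custom_tools` flag, and the JSON schema shown here are assumptions, not the actual `codex_core` API.

```rust
use serde_json::json;

/// Hypothetical tool definition: either a GPT-5 style custom ("freeform") tool
/// that receives a raw string, or a classic function tool with a JSON schema.
enum ToolSpec {
    Freeform { name: String, description: String },
    Function { name: String, parameters: serde_json::Value },
}

/// Hypothetical stand-in for a per-model-family capability flag.
struct ModelFamily {
    supports_custom_tools: bool,
}

/// Keep both definitions and hand back whichever one the model family supports.
fn apply_patch_tool_for(family: &ModelFamily) -> ToolSpec {
    if family.supports_custom_tools {
        // gpt-5: raw string payload, so the patch body never needs JSON escaping.
        ToolSpec::Freeform {
            name: "apply_patch".to_string(),
            description: "Apply a patch passed verbatim as the tool input".to_string(),
        }
    } else {
        // gpt-oss and other families: fall back to a normal function tool.
        ToolSpec::Function {
            name: "apply_patch".to_string(),
            parameters: json!({
                "type": "object",
                "properties": { "patch": { "type": "string" } },
                "required": ["patch"]
            }),
        }
    }
}
```

The new `codex_mini_latest_tools` test in the diff below exercises this switch by forcing the `codex-mini-latest` model family and asserting which tool names end up in the request body.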
@@ -1,6 +1,9 @@
 #![allow(clippy::unwrap_used)]

 use codex_core::ConversationManager;
 use codex_core::ModelProviderInfo;
 use codex_core::built_in_model_providers;
+use codex_core::model_family::find_family_for_model;
 use codex_core::protocol::AskForApproval;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::InputItem;
@@ -25,8 +28,20 @@ fn sse_completed(id: &str) -> String {
     load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
 }

+fn assert_tool_names(body: &serde_json::Value, expected_names: &[&str]) {
+    assert_eq!(
+        body["tools"]
+            .as_array()
+            .unwrap()
+            .iter()
+            .map(|t| t["name"].as_str().unwrap().to_string())
+            .collect::<Vec<_>>(),
+        expected_names
+    );
+}
+
 #[tokio::test(flavor = "multi_thread", worker_threads = 4)]
-async fn default_system_instructions_contain_apply_patch() {
+async fn codex_mini_latest_tools() {
     use pretty_assertions::assert_eq;

     let server = MockServer::start().await;
@@ -58,6 +73,10 @@ async fn default_system_instructions_contain_apply_patch() {

     let conversation_manager =
         ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
+    config.include_apply_patch_tool = false;
+    config.model = "codex-mini-latest".to_string();
+    config.model_family = find_family_for_model("codex-mini-latest").unwrap();
+
     let codex = conversation_manager
         .new_conversation(config)
         .await
@@ -173,18 +192,6 @@ async fn prompt_tools_are_consistent_across_requests() {
     // our internal implementation is responsible for keeping tools in sync
     // with the OpenAI schema, so we just verify the tool presence here
     let expected_tools_names: &[&str] = &["shell", "update_plan", "apply_patch"];
-    fn assert_tool_names(body: &serde_json::Value, expected_names: &[&str]) {
-        assert_eq!(
-            body["tools"]
-                .as_array()
-                .unwrap()
-                .iter()
-                .map(|t| t["name"].as_str().unwrap().to_string())
-                .collect::<Vec<_>>(),
-            expected_names
-        );
-    }
-
     let body0 = requests[0].body_json::<serde_json::Value>().unwrap();
     assert_eq!(
         body0["instructions"],
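For context, an illustrative call site for the hoisted `assert_tool_names` helper, not part of the diff above; the wrapper function name is hypothetical, and the expected list is the one used by `prompt_tools_are_consistent_across_requests`.

```rust
/// Illustrative only: check the advertised tools in a captured request body,
/// where `body` is a `serde_json::Value` parsed from the mock server's request.
fn check_request_tools(body: &serde_json::Value) {
    assert_tool_names(body, &["shell", "update_plan", "apply_patch"]);
}
```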