chore: update tool config (#4755)

## Summary
Updates tool config for gpt-5-codex

## Test Plan
- [x] Ran locally
- [x] Updated unit tests
This commit is contained in:
Dylan
2025-10-04 22:47:26 -07:00
committed by GitHub
parent 06853d94f0
commit 3203862167
5 changed files with 23 additions and 48 deletions

View File

@@ -109,7 +109,6 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
supports_reasoning_summaries: true,
reasoning_summary_format: ReasoningSummaryFormat::Experimental,
base_instructions: GPT_5_CODEX_INSTRUCTIONS.to_string(),
experimental_supported_tools: vec!["read_file".to_string()],
apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
)
} else if slug.starts_with("gpt-5") {

View File

@@ -680,24 +680,6 @@ mod tests {
);
}
#[test]
fn test_build_specs_includes_beta_read_file_tool() {
let model_family = find_family_for_model("gpt-5-codex")
.expect("gpt-5-codex should be a valid model family");
let config = ToolsConfig::new(&ToolsConfigParams {
model_family: &model_family,
include_plan_tool: false,
include_apply_patch_tool: false,
include_web_search_request: false,
use_streamable_shell_tool: false,
include_view_image_tool: false,
experimental_unified_exec_tool: true,
});
let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
assert_eq_tool_names(&tools, &["unified_exec", "apply_patch", "read_file"]);
}
#[test]
fn test_build_specs_mcp_tools() {
let model_family = find_family_for_model("o3").expect("o3 should be a valid model family");
@@ -922,7 +904,6 @@ mod tests {
&[
"unified_exec",
"apply_patch",
"read_file",
"web_search",
"view_image",
"dash/search",
@@ -930,7 +911,7 @@ mod tests {
);
assert_eq!(
tools[5],
tools[4],
ToolSpec::Function(ResponsesApiTool {
name: "dash/search".to_string(),
parameters: JsonSchema::Object {
@@ -990,14 +971,13 @@ mod tests {
&[
"unified_exec",
"apply_patch",
"read_file",
"web_search",
"view_image",
"dash/paginate",
],
);
assert_eq!(
tools[5],
tools[4],
ToolSpec::Function(ResponsesApiTool {
name: "dash/paginate".to_string(),
parameters: JsonSchema::Object {
@@ -1055,14 +1035,13 @@ mod tests {
&[
"unified_exec",
"apply_patch",
"read_file",
"web_search",
"view_image",
"dash/tags",
],
);
assert_eq!(
tools[5],
tools[4],
ToolSpec::Function(ResponsesApiTool {
name: "dash/tags".to_string(),
parameters: JsonSchema::Object {
@@ -1123,14 +1102,13 @@ mod tests {
&[
"unified_exec",
"apply_patch",
"read_file",
"web_search",
"view_image",
"dash/value",
],
);
assert_eq!(
tools[5],
tools[4],
ToolSpec::Function(ResponsesApiTool {
name: "dash/value".to_string(),
parameters: JsonSchema::Object {
@@ -1228,7 +1206,6 @@ mod tests {
&[
"unified_exec",
"apply_patch",
"read_file",
"web_search",
"view_image",
"test_server/do_something_cool",
@@ -1236,7 +1213,7 @@ mod tests {
);
assert_eq!(
tools[5],
tools[4],
ToolSpec::Function(ResponsesApiTool {
name: "test_server/do_something_cool".to_string(),
parameters: JsonSchema::Object {

View File

@@ -125,11 +125,7 @@ async fn model_selects_expected_tools() {
let gpt5_codex_tools = collect_tool_identifiers_for_model("gpt-5-codex").await;
assert_eq!(
gpt5_codex_tools,
vec![
"shell".to_string(),
"apply_patch".to_string(),
"read_file".to_string()
],
"gpt-5-codex should expose the beta read_file tool",
vec!["shell".to_string(), "apply_patch".to_string(),],
"gpt-5-codex should expose the apply_patch tool",
);
}

View File

@@ -180,16 +180,16 @@ async fn prompt_tools_are_consistent_across_requests() {
let cwd = TempDir::new().unwrap();
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
config.cwd = cwd.path().to_path_buf();
config.model_provider = model_provider;
config.user_instructions = Some("be consistent and helpful".to_string());
config.include_apply_patch_tool = true;
config.include_plan_tool = true;
let conversation_manager =
ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
let expected_instructions = config.model_family.base_instructions.clone();
let base_instructions = config.model_family.base_instructions.clone();
let codex = conversation_manager
.new_conversation(config)
.await
@@ -222,19 +222,10 @@ async fn prompt_tools_are_consistent_across_requests() {
// our internal implementation is responsible for keeping tools in sync
// with the OpenAI schema, so we just verify the tool presence here
let tools_by_model: HashMap<&'static str, Vec<&'static str>> = HashMap::from([
(
"gpt-5",
vec!["shell", "update_plan", "apply_patch", "view_image"],
),
("gpt-5", vec!["shell", "update_plan", "view_image"]),
(
"gpt-5-codex",
vec![
"shell",
"update_plan",
"apply_patch",
"read_file",
"view_image",
],
vec!["shell", "update_plan", "apply_patch", "view_image"],
),
]);
let expected_tools_names = tools_by_model
@@ -242,6 +233,17 @@ async fn prompt_tools_are_consistent_across_requests() {
.unwrap_or_else(|| panic!("expected tools to be defined for model {OPENAI_DEFAULT_MODEL}"))
.as_slice();
let body0 = requests[0].body_json::<serde_json::Value>().unwrap();
let expected_instructions = if expected_tools_names.contains(&"apply_patch") {
base_instructions
} else {
[
base_instructions.clone(),
include_str!("../../../apply-patch/apply_patch_tool_instructions.md").to_string(),
]
.join("\n")
};
assert_eq!(
body0["instructions"],
serde_json::json!(expected_instructions),

View File

@@ -21,6 +21,7 @@ use serde_json::Value;
use wiremock::matchers::any;
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable read_file tool"]
async fn read_file_tool_returns_requested_lines() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));