[apply_patch] freeform apply_patch tool (#2576)

## Summary
GPT-5 introduced the concept of [custom
tools](https://platform.openai.com/docs/guides/function-calling#custom-tools),
which allow the model to send a raw string back instead of JSON-encoded
arguments, avoiding JSON-escaping issues. We are migrating gpt-5 to use this
tool type by default.

However, gpt-oss models do not support custom tools, only plain function
tools. So we keep both tool definitions and provide whichever one the model
family supports.
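
In rough terms, the wiring ends up like the sketch below. This is illustrative only; `ToolSpec`, `ModelFamily`, and `supports_custom_tools` are hypothetical stand-ins, not the actual codex types.

```rust
// Illustrative sketch: keep both tool definitions and hand out the one the
// model family can actually use.
enum ToolSpec {
    /// Classic function tool: arguments arrive as a JSON object.
    Function { name: String, parameters_schema: String },
    /// Freeform "custom" tool: the model sends the raw patch text as-is.
    Freeform { name: String, description: String },
}

struct ModelFamily {
    /// gpt-5 family models can call custom tools; gpt-oss models cannot.
    supports_custom_tools: bool,
}

fn apply_patch_tool(model: &ModelFamily) -> ToolSpec {
    if model.supports_custom_tools {
        ToolSpec::Freeform {
            name: "apply_patch".to_string(),
            description: "Apply a patch supplied as raw text".to_string(),
        }
    } else {
        ToolSpec::Function {
            name: "apply_patch".to_string(),
            parameters_schema:
                r#"{"type":"object","properties":{"input":{"type":"string"}},"required":["input"]}"#
                    .to_string(),
        }
    }
}

fn main() {
    let gpt5 = ModelFamily { supports_custom_tools: true };
    // gpt-5 gets the freeform definition; a gpt-oss family would get the function form.
    match apply_patch_tool(&gpt5) {
        ToolSpec::Freeform { name, .. } => println!("freeform tool: {name}"),
        ToolSpec::Function { name, .. } => println!("function tool: {name}"),
    }
}
```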

## Testing
- [x] Tested locally with various models
- [x] Unit tests pass
Author: Dylan
Committed by: GitHub
Date: 2025-08-22 13:42:34 -07:00
Parent: dc42ec0eb4
Commit: 236c4f76a6
14 changed files with 534 additions and 138 deletions

@@ -24,6 +24,10 @@ pub enum ResponseInputItem {
         call_id: String,
         result: Result<CallToolResult, String>,
     },
+    CustomToolCallOutput {
+        call_id: String,
+        output: String,
+    },
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -77,6 +81,20 @@ pub enum ResponseItem {
         call_id: String,
         output: FunctionCallOutputPayload,
     },
+    CustomToolCall {
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        id: Option<String>,
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        status: Option<String>,
+        call_id: String,
+        name: String,
+        input: String,
+    },
+    CustomToolCallOutput {
+        call_id: String,
+        output: String,
+    },
     #[serde(other)]
     Other,
 }
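
The serde attributes on `id` and `status` mean those fields are dropped from the serialized item when they are `None` and tolerated when missing on input. Below is a minimal round-trip sketch of that behavior, using a standalone struct (the real enum's tagging/representation is not shown in this hunk):

```rust
use serde::{Deserialize, Serialize};

// Standalone stand-in for the new variant's fields, just to show the effect
// of `default` + `skip_serializing_if` on the optional metadata.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct CustomToolCall {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    status: Option<String>,
    call_id: String,
    name: String,
    input: String,
}

fn main() {
    let call = CustomToolCall {
        id: None,
        status: None,
        call_id: "call_123".into(),
        name: "apply_patch".into(),
        input: "*** Begin Patch\n*** End Patch".into(),
    };

    // `None` fields are omitted entirely from the JSON...
    let json = serde_json::to_string(&call).unwrap();
    assert!(!json.contains("\"status\""));

    // ...and `default` lets them be absent when parsing it back.
    let parsed: CustomToolCall = serde_json::from_str(&json).unwrap();
    assert_eq!(parsed, call);
}
```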
@@ -114,6 +132,9 @@ impl From<ResponseInputItem> for ResponseItem {
                     ),
                 },
             },
+            ResponseInputItem::CustomToolCallOutput { call_id, output } => {
+                Self::CustomToolCallOutput { call_id, output }
+            }
         }
     }
 }
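
Taken together, the conversion lets the harness hand the raw tool result straight back into the conversation history. A trimmed sketch of that flow, with the enums reduced to the single relevant variant:

```rust
// Reduced stand-ins: the real enums carry many more variants.
enum ResponseInputItem {
    CustomToolCallOutput { call_id: String, output: String },
}

enum ResponseItem {
    CustomToolCallOutput { call_id: String, output: String },
}

impl From<ResponseInputItem> for ResponseItem {
    fn from(item: ResponseInputItem) -> Self {
        match item {
            ResponseInputItem::CustomToolCallOutput { call_id, output } => {
                Self::CustomToolCallOutput { call_id, output }
            }
        }
    }
}

fn main() {
    // The output string is passed through verbatim; nothing is JSON-escaped here.
    let result = ResponseInputItem::CustomToolCallOutput {
        call_id: "call_123".into(),
        output: "patch applied".into(),
    };
    let history_item: ResponseItem = result.into();
    match history_item {
        ResponseItem::CustomToolCallOutput { call_id, output } => {
            println!("{call_id}: {output}");
        }
    }
}
```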