feat: add support for OpenAI tool type, local_shell (#961)

The new `codex-mini-latest` model expects a new tool with `{"type":
"local_shell"}`. Its contract is similar to that of the existing `function`
tool with `"name": "shell"`, so this change maps the `local_shell` tool call
onto `ExecParams` and sends it through the existing
`handle_container_exec_with_params()` code path.
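
Roughly, the mapping looks like the sketch below. The type and field names
here are simplified stand-ins for the real `codex-rs` definitions, which
differ in detail; the sketch only illustrates how a `local_shell` call's
fields line up with `ExecParams`:

```rust
// Minimal, self-contained sketch. `LocalShellAction` and `ExecParams` are
// simplified stand-ins for the actual codex-rs types.
struct LocalShellAction {
    command: Vec<String>,
    working_directory: Option<String>,
    timeout_ms: Option<u64>,
}

struct ExecParams {
    command: Vec<String>,
    cwd: Option<String>,
    timeout_ms: Option<u64>,
}

fn to_exec_params(action: LocalShellAction) -> ExecParams {
    ExecParams {
        command: action.command,
        cwd: action.working_directory,
        timeout_ms: action.timeout_ms,
    }
}

fn main() {
    let params = to_exec_params(LocalShellAction {
        command: vec!["bash".into(), "-lc".into(), "ls".into()],
        working_directory: None,
        timeout_ms: Some(10_000),
    });
    // From here the call flows through handle_container_exec_with_params(),
    // the same code path used by the existing `shell` function tool.
    assert_eq!(params.command.len(), 3);
}
```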

This also adds the following logic when adding the default set of tools
to a request:

```rust
let default_tools = if self.model.starts_with("codex") {
    &DEFAULT_CODEX_MODEL_TOOLS
} else {
    &DEFAULT_TOOLS
};
```

That is, if the model name starts with `"codex"`, we add `{"type":
"local_shell"}` to the list of tools; otherwise, we add the
aforementioned `shell` tool.
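
Because `OpenAiTool` is an internally tagged serde enum (`#[serde(tag =
"type")]`, see the diff below), the `LocalShell {}` variant serializes to
exactly that JSON. A minimal standalone check, assuming `serde` (with the
`derive` feature) and `serde_json` as dependencies:

```rust
use serde::Serialize;

// Trimmed-down copy of the enum from the diff below, keeping only the
// variant needed to show the serialized shape.
#[derive(Serialize)]
#[serde(tag = "type")]
enum OpenAiTool {
    #[serde(rename = "local_shell")]
    LocalShell {},
}

fn main() {
    let json = serde_json::to_string(&OpenAiTool::LocalShell {}).unwrap();
    assert_eq!(json, r#"{"type":"local_shell"}"#);
}
```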

To test this, I ran the TUI with `-m codex-mini-latest` and verified that it
used the `local_shell` tool. Note that I also had some entries under
`[mcp_servers]` in my personal `config.toml`, and the `codex-mini-latest`
model seemed eager to try the tools from the MCP servers first, so I have
commented them out for now. Keep an eye out for this if you're testing
`codex-mini-latest`!

Perhaps we should include more details with `{"type": "local_shell"}`, or
update the prompt here:

fd0b1b0208/codex-rs/core/prompt.md

For reference, the corresponding change in the TypeScript CLI is
https://github.com/openai/codex/pull/951.
Author: Michael Bolin
Date: 2025-05-16 14:38:08 -07:00 (committed via GitHub)
Commit: f48dd99f22 (parent: dfd54e1433)
5 changed files with 109 additions and 18 deletions

```diff
@@ -40,10 +40,18 @@ use crate::util::backoff;
 /// When serialized as JSON, this produces a valid "Tool" in the OpenAI
 /// Responses API.
-#[derive(Debug, Serialize)]
+#[derive(Debug, Clone, Serialize)]
+#[serde(tag = "type")]
+enum OpenAiTool {
+    #[serde(rename = "function")]
+    Function(ResponsesApiTool),
+    #[serde(rename = "local_shell")]
+    LocalShell {},
+}
+
+#[derive(Debug, Clone, Serialize)]
 struct ResponsesApiTool {
     name: &'static str,
-    r#type: &'static str, // "function"
     description: &'static str,
     strict: bool,
     parameters: JsonSchema,
 }
@@ -67,7 +75,7 @@ enum JsonSchema {
 }
 
 /// Tool usage specification
-static DEFAULT_TOOLS: LazyLock<Vec<ResponsesApiTool>> = LazyLock::new(|| {
+static DEFAULT_TOOLS: LazyLock<Vec<OpenAiTool>> = LazyLock::new(|| {
     let mut properties = BTreeMap::new();
     properties.insert(
         "command".to_string(),
@@ -78,9 +86,8 @@ static DEFAULT_TOOLS: LazyLock<Vec<ResponsesApiTool>> = LazyLock::new(|| {
     properties.insert("workdir".to_string(), JsonSchema::String);
     properties.insert("timeout".to_string(), JsonSchema::Number);
 
-    vec![ResponsesApiTool {
+    vec![OpenAiTool::Function(ResponsesApiTool {
         name: "shell",
-        r#type: "function",
         description: "Runs a shell command, and returns its output.",
         strict: false,
         parameters: JsonSchema::Object {
@@ -88,9 +95,12 @@ static DEFAULT_TOOLS: LazyLock<Vec<ResponsesApiTool>> = LazyLock::new(|| {
             required: &["command"],
             additional_properties: false,
         },
-    }]
+    })]
 });
 
+static DEFAULT_CODEX_MODEL_TOOLS: LazyLock<Vec<OpenAiTool>> =
+    LazyLock::new(|| vec![OpenAiTool::LocalShell {}]);
+
 #[derive(Clone)]
 pub struct ModelClient {
     model: String,
@@ -152,8 +162,13 @@ impl ModelClient {
         }
 
         // Assemble tool list: built-in tools + any extra tools from the prompt.
-        let mut tools_json = Vec::with_capacity(DEFAULT_TOOLS.len() + prompt.extra_tools.len());
-        for t in DEFAULT_TOOLS.iter() {
+        let default_tools = if self.model.starts_with("codex") {
+            &DEFAULT_CODEX_MODEL_TOOLS
+        } else {
+            &DEFAULT_TOOLS
+        };
+        let mut tools_json = Vec::with_capacity(default_tools.len() + prompt.extra_tools.len());
+        for t in default_tools.iter() {
             tools_json.push(serde_json::to_value(t)?);
         }
         tools_json.extend(
```