llmx/codex-rs/core/src/config.rs
Gabriel Peal 3a1be084f9 [MCP] Add experimental support for streamable HTTP MCP servers (#4317)
This PR adds support for streamable HTTP MCP servers when the
`experimental_use_rmcp_client` option is enabled.

To set one up, add a new MCP server config with a `url`:
```toml
[mcp_servers.figma]
url = "http://127.0.0.1:3845/mcp"
```

It also supports an optional `bearer_token`, which is sent in the
`Authorization` header. The full OAuth flow is not supported yet.
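
For example (the token value below is a placeholder):

```toml
[mcp_servers.figma]
url = "http://127.0.0.1:3845/mcp"
bearer_token = "figd_example_token"
```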

Config parsing returns an error if it detects that the user mixed and
matched config fields (like `command` + `bearer_token`, or `url` + `env`).
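
For instance, a hypothetical entry mixing `url` with the stdio-only `env`
field would be rejected at parse time:

```toml
[mcp_servers.figma]
url = "http://127.0.0.1:3845/mcp"
env = { FIGMA_TOKEN = "example" } # error: stdio-only field on an HTTP server
```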

The best way to review this is to start with `core/src`, then
`rmcp-client/src/rmcp_client.rs`. The rest is tests and plumbing the
`Transport` struct through the codebase.

Example with the Figma MCP:
![Figma MCP example](https://github.com/user-attachments/assets/eaf2771e-df3e-4300-816b-184d7dec5a28)
2025-09-26 21:24:01 -04:00


use crate::config_profile::ConfigProfile;
use crate::config_types::History;
use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;
use crate::config_types::Notifications;
use crate::config_types::ReasoningSummaryFormat;
use crate::config_types::SandboxWorkspaceWrite;
use crate::config_types::ShellEnvironmentPolicy;
use crate::config_types::ShellEnvironmentPolicyToml;
use crate::config_types::Tui;
use crate::config_types::UriBasedFileOpener;
use crate::git_info::resolve_root_git_project_for_trust;
use crate::model_family::ModelFamily;
use crate::model_family::derive_default_model_family;
use crate::model_family::find_family_for_model;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use anyhow::Context;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::mcp_protocol::Tools;
use codex_protocol::mcp_protocol::UserSavedConfig;
use dirs::home_dir;
use serde::Deserialize;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use tempfile::NamedTempFile;
use toml::Value as TomlValue;
use toml_edit::Array as TomlArray;
use toml_edit::DocumentMut;
use toml_edit::Item as TomlItem;
use toml_edit::Table as TomlTable;
const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5-codex";
pub const GPT_5_CODEX_MEDIUM_MODEL: &str = "gpt-5-codex";
/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB
pub(crate) const CONFIG_TOML_FILE: &str = "config.toml";
/// Application configuration loaded from disk and merged with overrides.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {
/// Optional override of model selection.
pub model: String,
/// Model used specifically for review sessions. Defaults to "gpt-5-codex".
pub review_model: String,
pub model_family: ModelFamily,
/// Size of the context window for the model, in tokens.
pub model_context_window: Option<u64>,
/// Maximum number of output tokens.
pub model_max_output_tokens: Option<u64>,
/// Token usage threshold triggering auto-compaction of conversation history.
pub model_auto_compact_token_limit: Option<i64>,
/// Key into the model_providers map that specifies which provider to use.
pub model_provider_id: String,
/// Info needed to make an API request to the model.
pub model_provider: ModelProviderInfo,
/// Approval policy for executing commands.
pub approval_policy: AskForApproval,
pub sandbox_policy: SandboxPolicy,
pub shell_environment_policy: ShellEnvironmentPolicy,
/// When `true`, `AgentReasoning` events emitted by the backend will be
/// suppressed from the frontend output. This can reduce visual noise when
/// users are only interested in the final agent responses.
pub hide_agent_reasoning: bool,
/// When set to `true`, `AgentReasoningRawContentEvent` events will be shown in the UI/output.
/// Defaults to `false`.
pub show_raw_agent_reasoning: bool,
/// User-provided instructions from AGENTS.md.
pub user_instructions: Option<String>,
/// Base instructions override.
pub base_instructions: Option<String>,
/// Optional external notifier command. When set, Codex will spawn this
/// program after each completed *turn* (i.e. when the agent finishes
/// processing a user submission). The value must be the full command
/// broken into argv tokens **without** the trailing JSON argument - Codex
/// appends one extra argument containing a JSON payload describing the
/// event.
///
/// Example `~/.codex/config.toml` snippet:
///
/// ```toml
/// notify = ["notify-send", "Codex"]
/// ```
///
/// which will be invoked as:
///
/// ```shell
/// notify-send Codex '{"type":"agent-turn-complete","turn-id":"12345"}'
/// ```
///
/// If unset the feature is disabled.
pub notify: Option<Vec<String>>,
/// TUI notifications preference. When set, the TUI will send OSC 9 notifications on approvals
/// and turn completions when not focused.
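///
/// Example `~/.codex/config.toml` snippet:
///
/// ```toml
/// [tui]
/// notifications = true
/// ```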
pub tui_notifications: Notifications,
/// The directory that should be treated as the current working directory
/// for the session. All relative paths inside the business-logic layer are
/// resolved against this path.
pub cwd: PathBuf,
/// Definition for MCP servers that Codex can reach out to for tool calls.
pub mcp_servers: HashMap<String, McpServerConfig>,
/// Combined provider map (defaults merged with user-defined overrides).
pub model_providers: HashMap<String, ModelProviderInfo>,
/// Maximum number of bytes to include from an AGENTS.md project doc file.
pub project_doc_max_bytes: usize,
/// Directory containing all Codex state (defaults to `~/.codex` but can be
/// overridden by the `CODEX_HOME` environment variable).
pub codex_home: PathBuf,
/// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
pub history: History,
/// Optional URI-based file opener. If set, citations to files in the model
/// output will be hyperlinked using the specified URI scheme.
pub file_opener: UriBasedFileOpener,
/// Path to the `codex-linux-sandbox` executable. This must be set if
/// [`crate::exec::SandboxType::LinuxSeccomp`] is used. Note that this
/// cannot be set in the config file: it must be set in code via
/// [`ConfigOverrides`].
///
/// When this program is invoked, arg0 will be set to `codex-linux-sandbox`.
pub codex_linux_sandbox_exe: Option<PathBuf>,
/// Value to use for `reasoning.effort` when making a request using the
/// Responses API.
pub model_reasoning_effort: Option<ReasoningEffort>,
/// If not "none", the value to use for `reasoning.summary` when making a
/// request using the Responses API.
pub model_reasoning_summary: ReasoningSummary,
/// Optional verbosity control for GPT-5 models (Responses API `text.verbosity`).
pub model_verbosity: Option<Verbosity>,
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: String,
/// Include an experimental plan tool that the model can use to update its current plan and status of each step.
pub include_plan_tool: bool,
/// Include the `apply_patch` tool for models that benefit from invoking
/// file edits as a structured tool call. When unset, this falls back to the
/// model family's default preference.
pub include_apply_patch_tool: bool,
pub tools_web_search_request: bool,
pub use_experimental_streamable_shell_tool: bool,
/// If set to `true`, use only the experimental unified exec tool.
pub use_experimental_unified_exec_tool: bool,
/// If set to `true`, use the experimental official Rust MCP client.
/// https://github.com/modelcontextprotocol/rust-sdk
pub use_experimental_use_rmcp_client: bool,
/// Include the `view_image` tool that lets the agent attach a local image path to context.
pub include_view_image_tool: bool,
/// The active profile name used to derive this `Config` (if any).
pub active_profile: Option<String>,
/// When true, disables burst-paste detection for typed input entirely.
/// All characters are inserted as they are received, and no buffering
/// or placeholder replacement will occur for fast keypress bursts.
pub disable_paste_burst: bool,
}
impl Config {
/// Load configuration with *generic* CLI overrides (`-c key=value`) applied
/// **in between** the values parsed from `config.toml` and the
/// strongly-typed overrides specified via [`ConfigOverrides`].
///
/// The precedence order is therefore: `config.toml` < `-c` overrides <
/// `ConfigOverrides`.
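///
/// A minimal usage sketch (not compiled as a doctest; assumes a single
/// `-c model="o3"` override already parsed by the CLI):
///
/// ```ignore
/// let cli_overrides = vec![("model".to_string(), toml::Value::String("o3".into()))];
/// let config = Config::load_with_cli_overrides(cli_overrides, ConfigOverrides::default())?;
/// ```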
pub fn load_with_cli_overrides(
cli_overrides: Vec<(String, TomlValue)>,
overrides: ConfigOverrides,
) -> std::io::Result<Self> {
// Resolve the directory that stores Codex state (e.g. ~/.codex or the
// value of $CODEX_HOME) so we can embed it into the resulting
// `Config` instance.
let codex_home = find_codex_home()?;
// Step 1: parse `config.toml` into a generic TOML value.
let mut root_value = load_config_as_toml(&codex_home)?;
// Step 2: apply the `-c` overrides.
for (path, value) in cli_overrides.into_iter() {
apply_toml_override(&mut root_value, &path, value);
}
// Step 3: deserialize into `ConfigToml` so that Serde can enforce the
// correct types.
let cfg: ConfigToml = root_value.try_into().map_err(|e| {
tracing::error!("Failed to deserialize overridden config: {e}");
std::io::Error::new(std::io::ErrorKind::InvalidData, e)
})?;
// Step 4: merge with the strongly-typed overrides.
Self::load_from_base_config_with_overrides(cfg, overrides, codex_home)
}
}
pub fn load_config_as_toml_with_cli_overrides(
codex_home: &Path,
cli_overrides: Vec<(String, TomlValue)>,
) -> std::io::Result<ConfigToml> {
let mut root_value = load_config_as_toml(codex_home)?;
for (path, value) in cli_overrides.into_iter() {
apply_toml_override(&mut root_value, &path, value);
}
let cfg: ConfigToml = root_value.try_into().map_err(|e| {
tracing::error!("Failed to deserialize overridden config: {e}");
std::io::Error::new(std::io::ErrorKind::InvalidData, e)
})?;
Ok(cfg)
}
/// Read `CODEX_HOME/config.toml` and return it as a generic TOML value. Returns
/// an empty TOML table when the file does not exist.
pub fn load_config_as_toml(codex_home: &Path) -> std::io::Result<TomlValue> {
let config_path = codex_home.join(CONFIG_TOML_FILE);
match std::fs::read_to_string(&config_path) {
Ok(contents) => match toml::from_str::<TomlValue>(&contents) {
Ok(val) => Ok(val),
Err(e) => {
tracing::error!("Failed to parse config.toml: {e}");
Err(std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}
},
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
tracing::info!("config.toml not found, using defaults");
Ok(TomlValue::Table(Default::default()))
}
Err(e) => {
tracing::error!("Failed to read config.toml: {e}");
Err(e)
}
}
}
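/// Load the `[mcp_servers]` table from `CODEX_HOME/config.toml`, returning an
/// empty map when the section (or the file itself) is missing.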
pub fn load_global_mcp_servers(
codex_home: &Path,
) -> std::io::Result<BTreeMap<String, McpServerConfig>> {
let root_value = load_config_as_toml(codex_home)?;
let Some(servers_value) = root_value.get("mcp_servers") else {
return Ok(BTreeMap::new());
};
servers_value
.clone()
.try_into()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}
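/// Replace the `[mcp_servers]` section of `CODEX_HOME/config.toml` with
/// `servers`, leaving the rest of the document untouched. The updated file is
/// written atomically via a temporary file in the same directory.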
pub fn write_global_mcp_servers(
codex_home: &Path,
servers: &BTreeMap<String, McpServerConfig>,
) -> std::io::Result<()> {
let config_path = codex_home.join(CONFIG_TOML_FILE);
let mut doc = match std::fs::read_to_string(&config_path) {
Ok(contents) => contents
.parse::<DocumentMut>()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => DocumentMut::new(),
Err(e) => return Err(e),
};
doc.as_table_mut().remove("mcp_servers");
if !servers.is_empty() {
let mut table = TomlTable::new();
table.set_implicit(true);
doc["mcp_servers"] = TomlItem::Table(table);
for (name, config) in servers {
let mut entry = TomlTable::new();
entry.set_implicit(false);
match &config.transport {
McpServerTransportConfig::Stdio { command, args, env } => {
entry["command"] = toml_edit::value(command.clone());
if !args.is_empty() {
let mut args_array = TomlArray::new();
for arg in args {
args_array.push(arg.clone());
}
entry["args"] = TomlItem::Value(args_array.into());
}
if let Some(env) = env
&& !env.is_empty()
{
let mut env_table = TomlTable::new();
env_table.set_implicit(false);
let mut pairs: Vec<_> = env.iter().collect();
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
for (key, value) in pairs {
env_table.insert(key, toml_edit::value(value.clone()));
}
entry["env"] = TomlItem::Table(env_table);
}
}
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
entry["url"] = toml_edit::value(url.clone());
if let Some(token) = bearer_token {
entry["bearer_token"] = toml_edit::value(token.clone());
}
}
}
if let Some(timeout) = config.startup_timeout_sec {
entry["startup_timeout_sec"] = toml_edit::value(timeout.as_secs_f64());
}
if let Some(timeout) = config.tool_timeout_sec {
entry["tool_timeout_sec"] = toml_edit::value(timeout.as_secs_f64());
}
doc["mcp_servers"][name.as_str()] = TomlItem::Table(entry);
}
}
std::fs::create_dir_all(codex_home)?;
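// Write to a temp file in the same directory, then atomically rename it
// over config.toml so readers never observe a partially written file.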
let tmp_file = NamedTempFile::new_in(codex_home)?;
std::fs::write(tmp_file.path(), doc.to_string())?;
tmp_file.persist(config_path).map_err(|err| err.error)?;
Ok(())
}
fn set_project_trusted_inner(doc: &mut DocumentMut, project_path: &Path) -> anyhow::Result<()> {
// Ensure we render a human-friendly structure:
//
// [projects]
// [projects."/path/to/project"]
// trust_level = "trusted"
//
// rather than inline tables like:
//
// [projects]
// "/path/to/project" = { trust_level = "trusted" }
let project_key = project_path.to_string_lossy().to_string();
// Ensure top-level `projects` exists as a non-inline, explicit table. If it
// exists but was previously represented as a non-table (e.g., inline),
// replace it with an explicit table.
{
let root = doc.as_table_mut();
// If `projects` exists but isn't a standard table (e.g., it's an inline table),
// convert it to an explicit table while preserving existing entries.
let existing_projects = root.get("projects").cloned();
if existing_projects.as_ref().is_none_or(|i| !i.is_table()) {
let mut projects_tbl = toml_edit::Table::new();
projects_tbl.set_implicit(true);
// If there was an existing inline table, migrate its entries to explicit tables.
if let Some(inline_tbl) = existing_projects.as_ref().and_then(|i| i.as_inline_table()) {
for (k, v) in inline_tbl.iter() {
if let Some(inner_tbl) = v.as_inline_table() {
let new_tbl = inner_tbl.clone().into_table();
projects_tbl.insert(k, toml_edit::Item::Table(new_tbl));
}
}
}
root.insert("projects", toml_edit::Item::Table(projects_tbl));
}
}
let Some(projects_tbl) = doc["projects"].as_table_mut() else {
return Err(anyhow::anyhow!(
"projects table missing after initialization"
));
};
// Ensure the per-project entry is its own explicit table. If it exists but
// is not a table (e.g., an inline table), replace it with an explicit table.
let needs_proj_table = !projects_tbl.contains_key(project_key.as_str())
|| projects_tbl
.get(project_key.as_str())
.and_then(|i| i.as_table())
.is_none();
if needs_proj_table {
projects_tbl.insert(project_key.as_str(), toml_edit::table());
}
let Some(proj_tbl) = projects_tbl
.get_mut(project_key.as_str())
.and_then(|i| i.as_table_mut())
else {
return Err(anyhow::anyhow!("project table missing for {}", project_key));
};
proj_tbl.set_implicit(false);
proj_tbl["trust_level"] = toml_edit::value("trusted");
Ok(())
}
/// Patch `CODEX_HOME/config.toml` project state.
/// Use with caution.
pub fn set_project_trusted(codex_home: &Path, project_path: &Path) -> anyhow::Result<()> {
let config_path = codex_home.join(CONFIG_TOML_FILE);
// Parse existing config if present; otherwise start a new document.
let mut doc = match std::fs::read_to_string(config_path.clone()) {
Ok(s) => s.parse::<DocumentMut>()?,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => DocumentMut::new(),
Err(e) => return Err(e.into()),
};
set_project_trusted_inner(&mut doc, project_path)?;
// ensure codex_home exists
std::fs::create_dir_all(codex_home)?;
// create a tmp_file
let tmp_file = NamedTempFile::new_in(codex_home)?;
std::fs::write(tmp_file.path(), doc.to_string())?;
// atomically move the tmp file into config.toml
tmp_file.persist(config_path)?;
Ok(())
}
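/// Ensure `[profiles.<profile_name>]` exists in `doc` as an explicit table,
/// creating the surrounding `profiles` table if needed, and return a mutable
/// reference to it.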
fn ensure_profile_table<'a>(
doc: &'a mut DocumentMut,
profile_name: &str,
) -> anyhow::Result<&'a mut toml_edit::Table> {
let mut created_profiles_table = false;
{
let root = doc.as_table_mut();
let needs_table = !root.contains_key("profiles")
|| root
.get("profiles")
.and_then(|item| item.as_table())
.is_none();
if needs_table {
root.insert("profiles", toml_edit::table());
created_profiles_table = true;
}
}
let Some(profiles_table) = doc["profiles"].as_table_mut() else {
return Err(anyhow::anyhow!(
"profiles table missing after initialization"
));
};
if created_profiles_table {
profiles_table.set_implicit(true);
}
let needs_profile_table = !profiles_table.contains_key(profile_name)
|| profiles_table
.get(profile_name)
.and_then(|item| item.as_table())
.is_none();
if needs_profile_table {
profiles_table.insert(profile_name, toml_edit::table());
}
let Some(profile_table) = profiles_table
.get_mut(profile_name)
.and_then(|item| item.as_table_mut())
else {
return Err(anyhow::anyhow!(format!(
"profile table missing for {profile_name}"
)));
};
profile_table.set_implicit(false);
Ok(profile_table)
}
// TODO(jif) refactor config persistence.
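/// Persist the selected `model` (and optional reasoning `effort`) to
/// `CODEX_HOME/config.toml`: into `[profiles.<name>]` when `active_profile`
/// is set, otherwise into the top-level table.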
pub async fn persist_model_selection(
codex_home: &Path,
active_profile: Option<&str>,
model: &str,
effort: Option<ReasoningEffort>,
) -> anyhow::Result<()> {
let config_path = codex_home.join(CONFIG_TOML_FILE);
let serialized = match tokio::fs::read_to_string(&config_path).await {
Ok(contents) => contents,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => String::new(),
Err(err) => return Err(err.into()),
};
let mut doc = if serialized.is_empty() {
DocumentMut::new()
} else {
serialized.parse::<DocumentMut>()?
};
if let Some(profile_name) = active_profile {
let profile_table = ensure_profile_table(&mut doc, profile_name)?;
profile_table["model"] = toml_edit::value(model);
match effort {
Some(effort) => {
profile_table["model_reasoning_effort"] = toml_edit::value(effort.to_string());
}
None => {
profile_table.remove("model_reasoning_effort");
}
}
} else {
let table = doc.as_table_mut();
table["model"] = toml_edit::value(model);
match effort {
Some(effort) => {
table["model_reasoning_effort"] = toml_edit::value(effort.to_string());
}
None => {
table.remove("model_reasoning_effort");
}
}
}
// TODO(jif) refactor the home creation
tokio::fs::create_dir_all(codex_home)
.await
.with_context(|| {
format!(
"failed to create Codex home directory at {}",
codex_home.display()
)
})?;
tokio::fs::write(&config_path, doc.to_string())
.await
.with_context(|| format!("failed to persist config.toml at {}", config_path.display()))?;
Ok(())
}
/// Apply a single dotted-path override onto a TOML value.
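///
/// For example, `-c mcp_servers.figma.url="http://127.0.0.1:3845/mcp"` yields
/// the path `mcp_servers.figma.url`: intermediate tables are created (or
/// non-table values replaced) as needed, and the final segment is assigned.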
fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
use toml::value::Table;
let segments: Vec<&str> = path.split('.').collect();
let mut current = root;
for (idx, segment) in segments.iter().enumerate() {
let is_last = idx == segments.len() - 1;
if is_last {
match current {
TomlValue::Table(table) => {
table.insert(segment.to_string(), value);
}
_ => {
let mut table = Table::new();
table.insert(segment.to_string(), value);
*current = TomlValue::Table(table);
}
}
return;
}
// Traverse or create intermediate object.
match current {
TomlValue::Table(table) => {
current = table
.entry(segment.to_string())
.or_insert_with(|| TomlValue::Table(Table::new()));
}
_ => {
*current = TomlValue::Table(Table::new());
if let TomlValue::Table(tbl) = current {
current = tbl
.entry(segment.to_string())
.or_insert_with(|| TomlValue::Table(Table::new()));
}
}
}
}
}
/// Base config deserialized from ~/.codex/config.toml.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
pub struct ConfigToml {
/// Optional override of model selection.
pub model: Option<String>,
/// Review model override used by the `/review` feature.
pub review_model: Option<String>,
/// Provider to use from the model_providers map.
pub model_provider: Option<String>,
/// Size of the context window for the model, in tokens.
pub model_context_window: Option<u64>,
/// Maximum number of output tokens.
pub model_max_output_tokens: Option<u64>,
/// Token usage threshold triggering auto-compaction of conversation history.
pub model_auto_compact_token_limit: Option<i64>,
/// Default approval policy for executing commands.
pub approval_policy: Option<AskForApproval>,
#[serde(default)]
pub shell_environment_policy: ShellEnvironmentPolicyToml,
/// Sandbox mode to use.
pub sandbox_mode: Option<SandboxMode>,
/// Sandbox configuration to apply if `sandbox` is `WorkspaceWrite`.
pub sandbox_workspace_write: Option<SandboxWorkspaceWrite>,
/// Optional external command to spawn for end-user notifications.
#[serde(default)]
pub notify: Option<Vec<String>>,
/// System instructions.
pub instructions: Option<String>,
/// Definition for MCP servers that Codex can reach out to for tool calls.
#[serde(default)]
pub mcp_servers: HashMap<String, McpServerConfig>,
/// User-defined provider entries that extend/override the built-in list.
#[serde(default)]
pub model_providers: HashMap<String, ModelProviderInfo>,
/// Maximum number of bytes to include from an AGENTS.md project doc file.
pub project_doc_max_bytes: Option<usize>,
/// Profile to use from the `profiles` map.
pub profile: Option<String>,
/// Named profiles to facilitate switching between different configurations.
#[serde(default)]
pub profiles: HashMap<String, ConfigProfile>,
/// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
#[serde(default)]
pub history: Option<History>,
/// Optional URI-based file opener. If set, citations to files in the model
/// output will be hyperlinked using the specified URI scheme.
pub file_opener: Option<UriBasedFileOpener>,
/// Collection of settings that are specific to the TUI.
pub tui: Option<Tui>,
/// When set to `true`, `AgentReasoning` events will be hidden from the
/// UI/output. Defaults to `false`.
pub hide_agent_reasoning: Option<bool>,
/// When set to `true`, `AgentReasoningRawContentEvent` events will be shown in the UI/output.
/// Defaults to `false`.
pub show_raw_agent_reasoning: Option<bool>,
pub model_reasoning_effort: Option<ReasoningEffort>,
pub model_reasoning_summary: Option<ReasoningSummary>,
/// Optional verbosity control for GPT-5 models (Responses API `text.verbosity`).
pub model_verbosity: Option<Verbosity>,
/// Override to force-enable reasoning summaries for the configured model.
pub model_supports_reasoning_summaries: Option<bool>,
/// Override to force reasoning summary format for the configured model.
pub model_reasoning_summary_format: Option<ReasoningSummaryFormat>,
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: Option<String>,
/// Experimental path to a file whose contents replace the built-in BASE_INSTRUCTIONS.
pub experimental_instructions_file: Option<PathBuf>,
pub experimental_use_exec_command_tool: Option<bool>,
pub experimental_use_unified_exec_tool: Option<bool>,
pub experimental_use_rmcp_client: Option<bool>,
pub projects: Option<HashMap<String, ProjectConfig>>,
/// Nested tools section for feature toggles
pub tools: Option<ToolsToml>,
/// When true, disables burst-paste detection for typed input entirely.
/// All characters are inserted as they are received, and no buffering
/// or placeholder replacement will occur for fast keypress bursts.
pub disable_paste_burst: Option<bool>,
}
impl From<ConfigToml> for UserSavedConfig {
fn from(config_toml: ConfigToml) -> Self {
let profiles = config_toml
.profiles
.into_iter()
.map(|(k, v)| (k, v.into()))
.collect();
Self {
approval_policy: config_toml.approval_policy,
sandbox_mode: config_toml.sandbox_mode,
sandbox_settings: config_toml.sandbox_workspace_write.map(From::from),
model: config_toml.model,
model_reasoning_effort: config_toml.model_reasoning_effort,
model_reasoning_summary: config_toml.model_reasoning_summary,
model_verbosity: config_toml.model_verbosity,
tools: config_toml.tools.map(From::from),
profile: config_toml.profile,
profiles,
}
}
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct ProjectConfig {
pub trust_level: Option<String>,
}
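/// Tools section as it appears in `config.toml`, e.g.:
///
/// ```toml
/// [tools]
/// web_search = true
/// view_image = true
/// ```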
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
pub struct ToolsToml {
#[serde(default, alias = "web_search_request")]
pub web_search: Option<bool>,
/// Enable the `view_image` tool that lets the agent attach local images.
#[serde(default)]
pub view_image: Option<bool>,
}
impl From<ToolsToml> for Tools {
fn from(tools_toml: ToolsToml) -> Self {
Self {
web_search: tools_toml.web_search,
view_image: tools_toml.view_image,
}
}
}
impl ConfigToml {
/// Derive the effective sandbox policy from the configuration.
fn derive_sandbox_policy(&self, sandbox_mode_override: Option<SandboxMode>) -> SandboxPolicy {
let resolved_sandbox_mode = sandbox_mode_override
.or(self.sandbox_mode)
.unwrap_or_default();
match resolved_sandbox_mode {
SandboxMode::ReadOnly => SandboxPolicy::new_read_only_policy(),
SandboxMode::WorkspaceWrite => match self.sandbox_workspace_write.as_ref() {
Some(SandboxWorkspaceWrite {
writable_roots,
network_access,
exclude_tmpdir_env_var,
exclude_slash_tmp,
}) => SandboxPolicy::WorkspaceWrite {
writable_roots: writable_roots.clone(),
network_access: *network_access,
exclude_tmpdir_env_var: *exclude_tmpdir_env_var,
exclude_slash_tmp: *exclude_slash_tmp,
},
None => SandboxPolicy::new_workspace_write_policy(),
},
SandboxMode::DangerFullAccess => SandboxPolicy::DangerFullAccess,
}
}
pub fn is_cwd_trusted(&self, resolved_cwd: &Path) -> bool {
let projects = self.projects.clone().unwrap_or_default();
let is_path_trusted = |path: &Path| {
let path_str = path.to_string_lossy().to_string();
projects
.get(&path_str)
.map(|p| p.trust_level.as_deref() == Some("trusted"))
.unwrap_or(false)
};
// Fast path: exact cwd match
if is_path_trusted(resolved_cwd) {
return true;
}
// If cwd lives inside a git worktree, check whether the root git project
// (the primary repository working directory) is trusted. This lets
// worktrees inherit trust from the main project.
if let Some(root_project) = resolve_root_git_project_for_trust(resolved_cwd) {
return is_path_trusted(&root_project);
}
false
}
pub fn get_config_profile(
&self,
override_profile: Option<String>,
) -> Result<ConfigProfile, std::io::Error> {
let profile = override_profile.or_else(|| self.profile.clone());
match profile {
Some(key) => {
if let Some(profile) = self.profiles.get(key.as_str()) {
return Ok(profile.clone());
}
Err(std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("config profile `{key}` not found"),
))
}
None => Ok(ConfigProfile::default()),
}
}
}
/// Optional overrides for user configuration (e.g., from CLI flags).
#[derive(Default, Debug, Clone)]
pub struct ConfigOverrides {
pub model: Option<String>,
pub review_model: Option<String>,
pub cwd: Option<PathBuf>,
pub approval_policy: Option<AskForApproval>,
pub sandbox_mode: Option<SandboxMode>,
pub model_provider: Option<String>,
pub config_profile: Option<String>,
pub codex_linux_sandbox_exe: Option<PathBuf>,
pub base_instructions: Option<String>,
pub include_plan_tool: Option<bool>,
pub include_apply_patch_tool: Option<bool>,
pub include_view_image_tool: Option<bool>,
pub show_raw_agent_reasoning: Option<bool>,
pub tools_web_search_request: Option<bool>,
}
impl Config {
/// Meant to be used exclusively for tests: `load_with_cli_overrides()` should
/// be used in all other cases.
pub fn load_from_base_config_with_overrides(
cfg: ConfigToml,
overrides: ConfigOverrides,
codex_home: PathBuf,
) -> std::io::Result<Self> {
let user_instructions = Self::load_instructions(Some(&codex_home));
// Destructure ConfigOverrides fully to ensure all overrides are applied.
let ConfigOverrides {
model,
review_model: override_review_model,
cwd,
approval_policy,
sandbox_mode,
model_provider,
config_profile: config_profile_key,
codex_linux_sandbox_exe,
base_instructions,
include_plan_tool,
include_apply_patch_tool,
include_view_image_tool,
show_raw_agent_reasoning,
tools_web_search_request: override_tools_web_search_request,
} = overrides;
let active_profile_name = config_profile_key
.as_ref()
.or(cfg.profile.as_ref())
.cloned();
let config_profile = match active_profile_name.as_ref() {
Some(key) => cfg
.profiles
.get(key)
.ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("config profile `{key}` not found"),
)
})?
.clone(),
None => ConfigProfile::default(),
};
let sandbox_policy = cfg.derive_sandbox_policy(sandbox_mode);
let mut model_providers = built_in_model_providers();
// Merge user-defined providers into the built-in list.
for (key, provider) in cfg.model_providers.into_iter() {
model_providers.entry(key).or_insert(provider);
}
let model_provider_id = model_provider
.or(config_profile.model_provider)
.or(cfg.model_provider)
.unwrap_or_else(|| "openai".to_string());
let model_provider = model_providers
.get(&model_provider_id)
.ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("Model provider `{model_provider_id}` not found"),
)
})?
.clone();
let shell_environment_policy = cfg.shell_environment_policy.into();
let resolved_cwd = {
use std::env;
match cwd {
None => {
tracing::info!("cwd not set, using current dir");
env::current_dir()?
}
Some(p) if p.is_absolute() => p,
Some(p) => {
// Resolve relative path against the current working directory.
tracing::info!("cwd is relative, resolving against current dir");
let mut current = env::current_dir()?;
current.push(p);
current
}
}
};
let history = cfg.history.unwrap_or_default();
let tools_web_search_request = override_tools_web_search_request
.or(cfg.tools.as_ref().and_then(|t| t.web_search))
.unwrap_or(false);
let include_view_image_tool = include_view_image_tool
.or(cfg.tools.as_ref().and_then(|t| t.view_image))
.unwrap_or(true);
let model = model
.or(config_profile.model)
.or(cfg.model)
.unwrap_or_else(default_model);
let mut model_family =
find_family_for_model(&model).unwrap_or_else(|| derive_default_model_family(&model));
if let Some(supports_reasoning_summaries) = cfg.model_supports_reasoning_summaries {
model_family.supports_reasoning_summaries = supports_reasoning_summaries;
}
if let Some(model_reasoning_summary_format) = cfg.model_reasoning_summary_format {
model_family.reasoning_summary_format = model_reasoning_summary_format;
}
let openai_model_info = get_model_info(&model_family);
let model_context_window = cfg
.model_context_window
.or_else(|| openai_model_info.as_ref().map(|info| info.context_window));
let model_max_output_tokens = cfg.model_max_output_tokens.or_else(|| {
openai_model_info
.as_ref()
.map(|info| info.max_output_tokens)
});
let model_auto_compact_token_limit = cfg.model_auto_compact_token_limit.or_else(|| {
openai_model_info
.as_ref()
.and_then(|info| info.auto_compact_token_limit)
});
// Load base instructions override from a file if specified. If the
// path is relative, resolve it against the effective cwd so the
// behaviour matches other path-like config values.
let experimental_instructions_path = config_profile
.experimental_instructions_file
.as_ref()
.or(cfg.experimental_instructions_file.as_ref());
let file_base_instructions =
Self::get_base_instructions(experimental_instructions_path, &resolved_cwd)?;
let base_instructions = base_instructions.or(file_base_instructions);
// Default review model when not set in config; allow CLI override to take precedence.
let review_model = override_review_model
.or(cfg.review_model)
.unwrap_or_else(default_review_model);
let config = Self {
model,
review_model,
model_family,
model_context_window,
model_max_output_tokens,
model_auto_compact_token_limit,
model_provider_id,
model_provider,
cwd: resolved_cwd,
approval_policy: approval_policy
.or(config_profile.approval_policy)
.or(cfg.approval_policy)
.unwrap_or_else(AskForApproval::default),
sandbox_policy,
shell_environment_policy,
notify: cfg.notify,
user_instructions,
base_instructions,
mcp_servers: cfg.mcp_servers,
model_providers,
project_doc_max_bytes: cfg.project_doc_max_bytes.unwrap_or(PROJECT_DOC_MAX_BYTES),
codex_home,
history,
file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode),
codex_linux_sandbox_exe,
hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false),
show_raw_agent_reasoning: cfg
.show_raw_agent_reasoning
.or(show_raw_agent_reasoning)
.unwrap_or(false),
model_reasoning_effort: config_profile
.model_reasoning_effort
.or(cfg.model_reasoning_effort),
model_reasoning_summary: config_profile
.model_reasoning_summary
.or(cfg.model_reasoning_summary)
.unwrap_or_default(),
model_verbosity: config_profile.model_verbosity.or(cfg.model_verbosity),
chatgpt_base_url: config_profile
.chatgpt_base_url
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
include_plan_tool: include_plan_tool.unwrap_or(false),
include_apply_patch_tool: include_apply_patch_tool.unwrap_or(false),
tools_web_search_request,
use_experimental_streamable_shell_tool: cfg
.experimental_use_exec_command_tool
.unwrap_or(false),
use_experimental_unified_exec_tool: cfg
.experimental_use_unified_exec_tool
.unwrap_or(false),
use_experimental_use_rmcp_client: cfg.experimental_use_rmcp_client.unwrap_or(false),
include_view_image_tool,
active_profile: active_profile_name,
disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
tui_notifications: cfg
.tui
.as_ref()
.map(|t| t.notifications.clone())
.unwrap_or_default(),
};
Ok(config)
}
fn load_instructions(codex_dir: Option<&Path>) -> Option<String> {
let mut p = match codex_dir {
Some(p) => p.to_path_buf(),
None => return None,
};
p.push("AGENTS.md");
std::fs::read_to_string(&p).ok().and_then(|s| {
let s = s.trim();
if s.is_empty() {
None
} else {
Some(s.to_string())
}
})
}
fn get_base_instructions(
path: Option<&PathBuf>,
cwd: &Path,
) -> std::io::Result<Option<String>> {
let p = match path.as_ref() {
None => return Ok(None),
Some(p) => p,
};
// Resolve relative paths against the provided cwd to make CLI
// overrides consistent regardless of where the process was launched
// from.
let full_path = if p.is_relative() {
cwd.join(p)
} else {
p.to_path_buf()
};
let contents = std::fs::read_to_string(&full_path).map_err(|e| {
std::io::Error::new(
e.kind(),
format!(
"failed to read experimental instructions file {}: {e}",
full_path.display()
),
)
})?;
let s = contents.trim().to_string();
if s.is_empty() {
Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!(
"experimental instructions file is empty: {}",
full_path.display()
),
))
} else {
Ok(Some(s))
}
}
}
fn default_model() -> String {
OPENAI_DEFAULT_MODEL.to_string()
}
fn default_review_model() -> String {
OPENAI_DEFAULT_REVIEW_MODEL.to_string()
}
/// Returns the path to the Codex configuration directory, which can be
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
/// - If `CODEX_HOME` is set, the value will be canonicalized and this
/// function will Err if the path does not exist.
/// - If `CODEX_HOME` is not set, this function does not verify that the
/// directory exists.
pub fn find_codex_home() -> std::io::Result<PathBuf> {
// Honor the `CODEX_HOME` environment variable when it is set to allow users
// (and tests) to override the default location.
if let Ok(val) = std::env::var("CODEX_HOME")
&& !val.is_empty()
{
return PathBuf::from(val).canonicalize();
}
let mut p = home_dir().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"Could not find home directory",
)
})?;
p.push(".codex");
Ok(p)
}
/// Returns the path to the folder where Codex logs are stored. Does not verify
/// that the directory exists.
pub fn log_dir(cfg: &Config) -> std::io::Result<PathBuf> {
let mut p = cfg.codex_home.clone();
p.push("log");
Ok(p)
}
#[cfg(test)]
mod tests {
use crate::config_types::HistoryPersistence;
use crate::config_types::Notifications;
use super::*;
use pretty_assertions::assert_eq;
use std::time::Duration;
use tempfile::TempDir;
#[test]
fn test_toml_parsing() {
let history_with_persistence = r#"
[history]
persistence = "save-all"
"#;
let history_with_persistence_cfg = toml::from_str::<ConfigToml>(history_with_persistence)
.expect("TOML deserialization should succeed");
assert_eq!(
Some(History {
persistence: HistoryPersistence::SaveAll,
max_bytes: None,
}),
history_with_persistence_cfg.history
);
let history_no_persistence = r#"
[history]
persistence = "none"
"#;
let history_no_persistence_cfg = toml::from_str::<ConfigToml>(history_no_persistence)
.expect("TOML deserialization should succeed");
assert_eq!(
Some(History {
persistence: HistoryPersistence::None,
max_bytes: None,
}),
history_no_persistence_cfg.history
);
}
#[test]
fn tui_config_missing_notifications_field_defaults_to_disabled() {
let cfg = r#"
[tui]
"#;
let parsed = toml::from_str::<ConfigToml>(cfg)
.expect("TUI config without notifications should succeed");
let tui = parsed.tui.expect("config should include tui section");
assert_eq!(tui.notifications, Notifications::Enabled(false));
}
#[test]
fn test_sandbox_config_parsing() {
let sandbox_full_access = r#"
sandbox_mode = "danger-full-access"
[sandbox_workspace_write]
network_access = false # This should be ignored.
"#;
let sandbox_full_access_cfg = toml::from_str::<ConfigToml>(sandbox_full_access)
.expect("TOML deserialization should succeed");
let sandbox_mode_override = None;
assert_eq!(
SandboxPolicy::DangerFullAccess,
sandbox_full_access_cfg.derive_sandbox_policy(sandbox_mode_override)
);
let sandbox_read_only = r#"
sandbox_mode = "read-only"
[sandbox_workspace_write]
network_access = true # This should be ignored.
"#;
let sandbox_read_only_cfg = toml::from_str::<ConfigToml>(sandbox_read_only)
.expect("TOML deserialization should succeed");
let sandbox_mode_override = None;
assert_eq!(
SandboxPolicy::ReadOnly,
sandbox_read_only_cfg.derive_sandbox_policy(sandbox_mode_override)
);
let sandbox_workspace_write = r#"
sandbox_mode = "workspace-write"
[sandbox_workspace_write]
writable_roots = [
"/my/workspace",
]
exclude_tmpdir_env_var = true
exclude_slash_tmp = true
"#;
let sandbox_workspace_write_cfg = toml::from_str::<ConfigToml>(sandbox_workspace_write)
.expect("TOML deserialization should succeed");
let sandbox_mode_override = None;
assert_eq!(
SandboxPolicy::WorkspaceWrite {
writable_roots: vec![PathBuf::from("/my/workspace")],
network_access: false,
exclude_tmpdir_env_var: true,
exclude_slash_tmp: true,
},
sandbox_workspace_write_cfg.derive_sandbox_policy(sandbox_mode_override)
);
}
#[test]
fn load_global_mcp_servers_returns_empty_if_missing() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let servers = load_global_mcp_servers(codex_home.path())?;
assert!(servers.is_empty());
Ok(())
}
#[test]
fn write_global_mcp_servers_round_trips_entries() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let mut servers = BTreeMap::new();
servers.insert(
"docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "echo".to_string(),
args: vec!["hello".to_string()],
env: None,
},
startup_timeout_sec: Some(Duration::from_secs(3)),
tool_timeout_sec: Some(Duration::from_secs(5)),
},
);
write_global_mcp_servers(codex_home.path(), &servers)?;
let loaded = load_global_mcp_servers(codex_home.path())?;
assert_eq!(loaded.len(), 1);
let docs = loaded.get("docs").expect("docs entry");
match &docs.transport {
McpServerTransportConfig::Stdio { command, args, env } => {
assert_eq!(command, "echo");
assert_eq!(args, &vec!["hello".to_string()]);
assert!(env.is_none());
}
other => panic!("unexpected transport {other:?}"),
}
assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(3)));
assert_eq!(docs.tool_timeout_sec, Some(Duration::from_secs(5)));
let empty = BTreeMap::new();
write_global_mcp_servers(codex_home.path(), &empty)?;
let loaded = load_global_mcp_servers(codex_home.path())?;
assert!(loaded.is_empty());
Ok(())
}
#[test]
fn load_global_mcp_servers_accepts_legacy_ms_field() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
std::fs::write(
&config_path,
r#"
[mcp_servers]
[mcp_servers.docs]
command = "echo"
startup_timeout_ms = 2500
"#,
)?;
let servers = load_global_mcp_servers(codex_home.path())?;
let docs = servers.get("docs").expect("docs entry");
assert_eq!(docs.startup_timeout_sec, Some(Duration::from_millis(2500)));
Ok(())
}
#[test]
fn write_global_mcp_servers_serializes_env_sorted() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let servers = BTreeMap::from([(
"docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "docs-server".to_string(),
args: vec!["--verbose".to_string()],
env: Some(HashMap::from([
("ZIG_VAR".to_string(), "3".to_string()),
("ALPHA_VAR".to_string(), "1".to_string()),
])),
},
startup_timeout_sec: None,
tool_timeout_sec: None,
},
)]);
write_global_mcp_servers(codex_home.path(), &servers)?;
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
let serialized = std::fs::read_to_string(&config_path)?;
assert_eq!(
serialized,
r#"[mcp_servers.docs]
command = "docs-server"
args = ["--verbose"]
[mcp_servers.docs.env]
ALPHA_VAR = "1"
ZIG_VAR = "3"
"#
);
let loaded = load_global_mcp_servers(codex_home.path())?;
let docs = loaded.get("docs").expect("docs entry");
match &docs.transport {
McpServerTransportConfig::Stdio { command, args, env } => {
assert_eq!(command, "docs-server");
assert_eq!(args, &vec!["--verbose".to_string()]);
let env = env
.as_ref()
.expect("env should be preserved for stdio transport");
assert_eq!(env.get("ALPHA_VAR"), Some(&"1".to_string()));
assert_eq!(env.get("ZIG_VAR"), Some(&"3".to_string()));
}
other => panic!("unexpected transport {other:?}"),
}
Ok(())
}
#[test]
fn write_global_mcp_servers_serializes_streamable_http() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let mut servers = BTreeMap::from([(
"docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url: "https://example.com/mcp".to_string(),
bearer_token: Some("secret-token".to_string()),
},
startup_timeout_sec: Some(Duration::from_secs(2)),
tool_timeout_sec: None,
},
)]);
write_global_mcp_servers(codex_home.path(), &servers)?;
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
let serialized = std::fs::read_to_string(&config_path)?;
assert_eq!(
serialized,
r#"[mcp_servers.docs]
url = "https://example.com/mcp"
bearer_token = "secret-token"
startup_timeout_sec = 2.0
"#
);
let loaded = load_global_mcp_servers(codex_home.path())?;
let docs = loaded.get("docs").expect("docs entry");
match &docs.transport {
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
assert_eq!(url, "https://example.com/mcp");
assert_eq!(bearer_token.as_deref(), Some("secret-token"));
}
other => panic!("unexpected transport {other:?}"),
}
assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(2)));
servers.insert(
"docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url: "https://example.com/mcp".to_string(),
bearer_token: None,
},
startup_timeout_sec: None,
tool_timeout_sec: None,
},
);
write_global_mcp_servers(codex_home.path(), &servers)?;
let serialized = std::fs::read_to_string(&config_path)?;
assert_eq!(
serialized,
r#"[mcp_servers.docs]
url = "https://example.com/mcp"
"#
);
let loaded = load_global_mcp_servers(codex_home.path())?;
let docs = loaded.get("docs").expect("docs entry");
match &docs.transport {
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
assert_eq!(url, "https://example.com/mcp");
assert!(bearer_token.is_none());
}
other => panic!("unexpected transport {other:?}"),
}
Ok(())
}
#[tokio::test]
async fn persist_model_selection_updates_defaults() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
persist_model_selection(
codex_home.path(),
None,
"gpt-5-codex",
Some(ReasoningEffort::High),
)
.await?;
let serialized =
tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
assert_eq!(parsed.model.as_deref(), Some("gpt-5-codex"));
assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));
Ok(())
}
#[tokio::test]
async fn persist_model_selection_overwrites_existing_model() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
tokio::fs::write(
&config_path,
r#"
model = "gpt-5-codex"
model_reasoning_effort = "medium"
[profiles.dev]
model = "gpt-4.1"
"#,
)
.await?;
persist_model_selection(
codex_home.path(),
None,
"o4-mini",
Some(ReasoningEffort::High),
)
.await?;
let serialized = tokio::fs::read_to_string(config_path).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
assert_eq!(parsed.model.as_deref(), Some("o4-mini"));
assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));
assert_eq!(
parsed
.profiles
.get("dev")
.and_then(|profile| profile.model.as_deref()),
Some("gpt-4.1"),
);
Ok(())
}
#[tokio::test]
async fn persist_model_selection_updates_profile() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
persist_model_selection(
codex_home.path(),
Some("dev"),
"gpt-5-codex",
Some(ReasoningEffort::Medium),
)
.await?;
let serialized =
tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
let profile = parsed
.profiles
.get("dev")
.expect("profile should be created");
assert_eq!(profile.model.as_deref(), Some("gpt-5-codex"));
assert_eq!(
profile.model_reasoning_effort,
Some(ReasoningEffort::Medium)
);
Ok(())
}
#[tokio::test]
async fn persist_model_selection_updates_existing_profile() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
tokio::fs::write(
&config_path,
r#"
[profiles.dev]
model = "gpt-4"
model_reasoning_effort = "medium"
[profiles.prod]
model = "gpt-5-codex"
"#,
)
.await?;
persist_model_selection(
codex_home.path(),
Some("dev"),
"o4-high",
Some(ReasoningEffort::Medium),
)
.await?;
let serialized = tokio::fs::read_to_string(config_path).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
let dev_profile = parsed
.profiles
.get("dev")
.expect("dev profile should survive updates");
assert_eq!(dev_profile.model.as_deref(), Some("o4-high"));
assert_eq!(
dev_profile.model_reasoning_effort,
Some(ReasoningEffort::Medium)
);
assert_eq!(
parsed
.profiles
.get("prod")
.and_then(|profile| profile.model.as_deref()),
Some("gpt-5-codex"),
);
Ok(())
}
struct PrecedenceTestFixture {
cwd: TempDir,
codex_home: TempDir,
cfg: ConfigToml,
model_provider_map: HashMap<String, ModelProviderInfo>,
openai_provider: ModelProviderInfo,
openai_chat_completions_provider: ModelProviderInfo,
}
impl PrecedenceTestFixture {
fn cwd(&self) -> PathBuf {
self.cwd.path().to_path_buf()
}
fn codex_home(&self) -> PathBuf {
self.codex_home.path().to_path_buf()
}
}
fn create_test_fixture() -> std::io::Result<PrecedenceTestFixture> {
let toml = r#"
model = "o3"
approval_policy = "untrusted"
# Can be used to determine which profile to use if not specified by
# `ConfigOverrides`.
profile = "gpt3"
[model_providers.openai-chat-completions]
name = "OpenAI using Chat Completions"
base_url = "https://api.openai.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"
request_max_retries = 4 # retry failed HTTP requests
stream_max_retries = 10 # retry dropped SSE streams
stream_idle_timeout_ms = 300000 # 5m idle timeout
[profiles.o3]
model = "o3"
model_provider = "openai"
approval_policy = "never"
model_reasoning_effort = "high"
model_reasoning_summary = "detailed"
[profiles.gpt3]
model = "gpt-3.5-turbo"
model_provider = "openai-chat-completions"
[profiles.zdr]
model = "o3"
model_provider = "openai"
approval_policy = "on-failure"
[profiles.gpt5]
model = "gpt-5"
model_provider = "openai"
approval_policy = "on-failure"
model_reasoning_effort = "high"
model_reasoning_summary = "detailed"
model_verbosity = "high"
"#;
let cfg: ConfigToml = toml::from_str(toml).expect("TOML deserialization should succeed");
// Use a temporary directory for the cwd so it does not contain an
// AGENTS.md file.
let cwd_temp_dir = TempDir::new().unwrap();
let cwd = cwd_temp_dir.path().to_path_buf();
// Make it look like a Git repo so it does not search for AGENTS.md in
// a parent folder, either.
std::fs::write(cwd.join(".git"), "gitdir: nowhere")?;
let codex_home_temp_dir = TempDir::new().unwrap();
let openai_chat_completions_provider = ModelProviderInfo {
name: "OpenAI using Chat Completions".to_string(),
base_url: Some("https://api.openai.com/v1".to_string()),
env_key: Some("OPENAI_API_KEY".to_string()),
wire_api: crate::WireApi::Chat,
env_key_instructions: None,
query_params: None,
http_headers: None,
env_http_headers: None,
request_max_retries: Some(4),
stream_max_retries: Some(10),
stream_idle_timeout_ms: Some(300_000),
requires_openai_auth: false,
};
let model_provider_map = {
let mut model_provider_map = built_in_model_providers();
model_provider_map.insert(
"openai-chat-completions".to_string(),
openai_chat_completions_provider.clone(),
);
model_provider_map
};
let openai_provider = model_provider_map
.get("openai")
.expect("openai provider should exist")
.clone();
Ok(PrecedenceTestFixture {
cwd: cwd_temp_dir,
codex_home: codex_home_temp_dir,
cfg,
model_provider_map,
openai_provider,
openai_chat_completions_provider,
})
}
/// Users can specify config values at multiple levels that have the
/// following precedence:
///
/// 1. custom command-line argument, e.g. `--model o3`
/// 2. as part of a profile, where the `--profile` is specified via a CLI
/// (or in the config file itself)
/// 3. as an entry in `config.toml`, e.g. `model = "o3"`
/// 4. the default value for a required field defined in code, e.g.,
/// `crate::flags::OPENAI_DEFAULT_MODEL`
///
/// Note that profiles are the recommended way to specify a group of
/// configuration options together.
#[test]
fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
let fixture = create_test_fixture()?;
let o3_profile_overrides = ConfigOverrides {
config_profile: Some("o3".to_string()),
cwd: Some(fixture.cwd()),
..Default::default()
};
let o3_profile_config: Config = Config::load_from_base_config_with_overrides(
fixture.cfg.clone(),
o3_profile_overrides,
fixture.codex_home(),
)?;
assert_eq!(
Config {
model: "o3".to_string(),
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
model_family: find_family_for_model("o3").expect("known model slug"),
model_context_window: Some(200_000),
model_max_output_tokens: Some(100_000),
model_auto_compact_token_limit: None,
model_provider_id: "openai".to_string(),
model_provider: fixture.openai_provider.clone(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
shell_environment_policy: ShellEnvironmentPolicy::default(),
user_instructions: None,
notify: None,
cwd: fixture.cwd(),
mcp_servers: HashMap::new(),
model_providers: fixture.model_provider_map.clone(),
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
codex_home: fixture.codex_home(),
history: History::default(),
file_opener: UriBasedFileOpener::VsCode,
codex_linux_sandbox_exe: None,
hide_agent_reasoning: false,
show_raw_agent_reasoning: false,
model_reasoning_effort: Some(ReasoningEffort::High),
model_reasoning_summary: ReasoningSummary::Detailed,
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("o3".to_string()),
disable_paste_burst: false,
tui_notifications: Default::default(),
},
o3_profile_config
);
Ok(())
}
#[test]
fn test_precedence_fixture_with_gpt3_profile() -> std::io::Result<()> {
let fixture = create_test_fixture()?;
let gpt3_profile_overrides = ConfigOverrides {
config_profile: Some("gpt3".to_string()),
cwd: Some(fixture.cwd()),
..Default::default()
};
let gpt3_profile_config = Config::load_from_base_config_with_overrides(
fixture.cfg.clone(),
gpt3_profile_overrides,
fixture.codex_home(),
)?;
let expected_gpt3_profile_config = Config {
model: "gpt-3.5-turbo".to_string(),
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
model_family: find_family_for_model("gpt-3.5-turbo").expect("known model slug"),
model_context_window: Some(16_385),
model_max_output_tokens: Some(4_096),
model_auto_compact_token_limit: None,
model_provider_id: "openai-chat-completions".to_string(),
model_provider: fixture.openai_chat_completions_provider.clone(),
approval_policy: AskForApproval::UnlessTrusted,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
shell_environment_policy: ShellEnvironmentPolicy::default(),
user_instructions: None,
notify: None,
cwd: fixture.cwd(),
mcp_servers: HashMap::new(),
model_providers: fixture.model_provider_map.clone(),
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
codex_home: fixture.codex_home(),
history: History::default(),
file_opener: UriBasedFileOpener::VsCode,
codex_linux_sandbox_exe: None,
hide_agent_reasoning: false,
show_raw_agent_reasoning: false,
model_reasoning_effort: None,
model_reasoning_summary: ReasoningSummary::default(),
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("gpt3".to_string()),
disable_paste_burst: false,
tui_notifications: Default::default(),
};
assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);
// Verify that loading without specifying a profile in ConfigOverrides
// uses the default profile from the config file (which is "gpt3").
let default_profile_overrides = ConfigOverrides {
cwd: Some(fixture.cwd()),
..Default::default()
};
let default_profile_config = Config::load_from_base_config_with_overrides(
fixture.cfg.clone(),
default_profile_overrides,
fixture.codex_home(),
)?;
assert_eq!(expected_gpt3_profile_config, default_profile_config);
Ok(())
}
#[test]
fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> {
let fixture = create_test_fixture()?;
let zdr_profile_overrides = ConfigOverrides {
config_profile: Some("zdr".to_string()),
cwd: Some(fixture.cwd()),
..Default::default()
};
let zdr_profile_config = Config::load_from_base_config_with_overrides(
fixture.cfg.clone(),
zdr_profile_overrides,
fixture.codex_home(),
)?;
let expected_zdr_profile_config = Config {
model: "o3".to_string(),
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
model_family: find_family_for_model("o3").expect("known model slug"),
model_context_window: Some(200_000),
model_max_output_tokens: Some(100_000),
model_auto_compact_token_limit: None,
model_provider_id: "openai".to_string(),
model_provider: fixture.openai_provider.clone(),
approval_policy: AskForApproval::OnFailure,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
shell_environment_policy: ShellEnvironmentPolicy::default(),
user_instructions: None,
notify: None,
cwd: fixture.cwd(),
mcp_servers: HashMap::new(),
model_providers: fixture.model_provider_map.clone(),
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
codex_home: fixture.codex_home(),
history: History::default(),
file_opener: UriBasedFileOpener::VsCode,
codex_linux_sandbox_exe: None,
hide_agent_reasoning: false,
show_raw_agent_reasoning: false,
model_reasoning_effort: None,
model_reasoning_summary: ReasoningSummary::default(),
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("zdr".to_string()),
disable_paste_burst: false,
tui_notifications: Default::default(),
};
assert_eq!(expected_zdr_profile_config, zdr_profile_config);
Ok(())
}
#[test]
fn test_precedence_fixture_with_gpt5_profile() -> std::io::Result<()> {
let fixture = create_test_fixture()?;
let gpt5_profile_overrides = ConfigOverrides {
config_profile: Some("gpt5".to_string()),
cwd: Some(fixture.cwd()),
..Default::default()
};
let gpt5_profile_config = Config::load_from_base_config_with_overrides(
fixture.cfg.clone(),
gpt5_profile_overrides,
fixture.codex_home(),
)?;
let expected_gpt5_profile_config = Config {
model: "gpt-5".to_string(),
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
model_family: find_family_for_model("gpt-5").expect("known model slug"),
model_context_window: Some(272_000),
model_max_output_tokens: Some(128_000),
model_auto_compact_token_limit: None,
model_provider_id: "openai".to_string(),
model_provider: fixture.openai_provider.clone(),
approval_policy: AskForApproval::OnFailure,
sandbox_policy: SandboxPolicy::new_read_only_policy(),
shell_environment_policy: ShellEnvironmentPolicy::default(),
user_instructions: None,
notify: None,
cwd: fixture.cwd(),
mcp_servers: HashMap::new(),
model_providers: fixture.model_provider_map.clone(),
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
codex_home: fixture.codex_home(),
history: History::default(),
file_opener: UriBasedFileOpener::VsCode,
codex_linux_sandbox_exe: None,
hide_agent_reasoning: false,
show_raw_agent_reasoning: false,
model_reasoning_effort: Some(ReasoningEffort::High),
model_reasoning_summary: ReasoningSummary::Detailed,
model_verbosity: Some(Verbosity::High),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("gpt5".to_string()),
disable_paste_burst: false,
tui_notifications: Default::default(),
};
assert_eq!(expected_gpt5_profile_config, gpt5_profile_config);
Ok(())
}
#[test]
fn test_set_project_trusted_writes_explicit_tables() -> anyhow::Result<()> {
let project_dir = Path::new("/some/path");
let mut doc = DocumentMut::new();
set_project_trusted_inner(&mut doc, project_dir)?;
let contents = doc.to_string();
let raw_path = project_dir.to_string_lossy();
let path_str = if raw_path.contains('\\') {
format!("'{raw_path}'")
} else {
format!("\"{raw_path}\"")
};
let expected = format!(
r#"[projects.{path_str}]
trust_level = "trusted"
"#
);
assert_eq!(contents, expected);
Ok(())
}
#[test]
fn test_set_project_trusted_converts_inline_to_explicit() -> anyhow::Result<()> {
let project_dir = Path::new("/some/path");
// Seed config.toml with an inline project entry under [projects]
let raw_path = project_dir.to_string_lossy();
let path_str = if raw_path.contains('\\') {
format!("'{raw_path}'")
} else {
format!("\"{raw_path}\"")
};
// Use a quoted key so backslashes don't require escaping on Windows
let initial = format!(
r#"[projects]
{path_str} = {{ trust_level = "untrusted" }}
"#
);
let mut doc = initial.parse::<DocumentMut>()?;
// Run the function; it should convert to explicit tables and set trusted
set_project_trusted_inner(&mut doc, project_dir)?;
let contents = doc.to_string();
// Assert exact output after conversion to explicit table
let expected = format!(
r#"[projects]
[projects.{path_str}]
trust_level = "trusted"
"#
);
assert_eq!(contents, expected);
Ok(())
}
#[test]
fn test_set_project_trusted_migrates_top_level_inline_projects_preserving_entries()
-> anyhow::Result<()> {
let initial = r#"toplevel = "baz"
projects = { "/Users/mbolin/code/codex4" = { trust_level = "trusted", foo = "bar" } , "/Users/mbolin/code/codex3" = { trust_level = "trusted" } }
model = "foo""#;
let mut doc = initial.parse::<DocumentMut>()?;
// Approve a new directory
let new_project = Path::new("/Users/mbolin/code/codex2");
set_project_trusted_inner(&mut doc, new_project)?;
let contents = doc.to_string();
// Since we created the [projects] table as part of migration, it is kept implicit.
// Expect explicit per-project tables, preserving prior entries and appending the new one.
let expected = r#"toplevel = "baz"
model = "foo"
[projects."/Users/mbolin/code/codex4"]
trust_level = "trusted"
foo = "bar"
[projects."/Users/mbolin/code/codex3"]
trust_level = "trusted"
[projects."/Users/mbolin/code/codex2"]
trust_level = "trusted"
"#;
assert_eq!(contents, expected);
Ok(())
}
}
#[cfg(test)]
mod notifications_tests {
use crate::config_types::Notifications;
use serde::Deserialize;
#[derive(Deserialize, Debug, PartialEq)]
struct TuiTomlTest {
notifications: Notifications,
}
#[derive(Deserialize, Debug, PartialEq)]
struct RootTomlTest {
tui: TuiTomlTest,
}
#[test]
fn test_tui_notifications_true() {
let toml = r#"
[tui]
notifications = true
"#;
let parsed: RootTomlTest = toml::from_str(toml).expect("deserialize notifications=true");
assert!(matches!(
parsed.tui.notifications,
Notifications::Enabled(true)
));
}
#[test]
fn test_tui_notifications_custom_array() {
let toml = r#"
[tui]
notifications = ["foo"]
"#;
let parsed: RootTomlTest =
toml::from_str(toml).expect("deserialize notifications=[\"foo\"]");
assert!(matches!(
parsed.tui.notifications,
Notifications::Custom(ref v) if v == &vec!["foo".to_string()]
));
}
}