llmx/codex-rs/core/src/config.rs
Michael Bolin 86d5a9d80d chore: rename unless-allow-listed to untrusted (#1378)
For the `approval_policy` config option, renames `unless-allow-listed`
to `untrusted`. In general, when it comes to exec'ing commands, I think
"trusted" is a more accurate term than "safe."

Also drops the `AskForApproval::AutoEdit` variant, as we were not really
making use of it anyway.

Fixes https://github.com/openai/codex/issues/1250.
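For reference, this is how the renamed value looks in `~/.codex/config.toml`
(the alternative values shown are the ones exercised by the test fixture in
this file):

```toml
# Prompt before running any command Codex does not consider trusted.
approval_policy = "untrusted"

# Other values exercised by the tests in this file: "on-failure", "never".
```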


---
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/1378).
* #1379
* __->__ #1378
2025-06-24 22:19:21 -07:00


use crate::config_profile::ConfigProfile;
use crate::config_types::History;
use crate::config_types::McpServerConfig;
use crate::config_types::ReasoningEffort;
use crate::config_types::ReasoningSummary;
use crate::config_types::ShellEnvironmentPolicy;
use crate::config_types::ShellEnvironmentPolicyToml;
use crate::config_types::Tui;
use crate::config_types::UriBasedFileOpener;
use crate::flags::OPENAI_DEFAULT_MODEL;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::built_in_model_providers;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use dirs::home_dir;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;

/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB

/// Application configuration loaded from disk and merged with overrides.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {
    /// Model selected for the session (resolved from overrides, the active
    /// profile, `config.toml`, or the built-in default).
    pub model: String,

    /// Key into the model_providers map that specifies which provider to use.
    pub model_provider_id: String,

    /// Info needed to make an API request to the model.
    pub model_provider: ModelProviderInfo,

    /// Approval policy for executing commands.
    pub approval_policy: AskForApproval,

    pub sandbox_policy: SandboxPolicy,

    pub shell_environment_policy: ShellEnvironmentPolicy,

    /// When `true`, `AgentReasoning` events emitted by the backend will be
    /// suppressed from the frontend output. This can reduce visual noise when
    /// users are only interested in the final agent responses.
    pub hide_agent_reasoning: bool,

    /// Disable server-side response storage (sends the full conversation
    /// context with every request). Currently necessary for OpenAI customers
    /// who have opted into Zero Data Retention (ZDR).
    pub disable_response_storage: bool,

    /// User-provided instructions from instructions.md.
    pub instructions: Option<String>,

    /// Optional external notifier command. When set, Codex will spawn this
    /// program after each completed *turn* (i.e. when the agent finishes
    /// processing a user submission). The value must be the full command
    /// broken into argv tokens **without** the trailing JSON argument - Codex
    /// appends one extra argument containing a JSON payload describing the
    /// event.
    ///
    /// Example `~/.codex/config.toml` snippet:
    ///
    /// ```toml
    /// notify = ["notify-send", "Codex"]
    /// ```
    ///
    /// which will be invoked as:
    ///
    /// ```shell
    /// notify-send Codex '{"type":"agent-turn-complete","turn-id":"12345"}'
    /// ```
    ///
    /// If unset, the feature is disabled.
    pub notify: Option<Vec<String>>,

    /// The directory that should be treated as the current working directory
    /// for the session. All relative paths inside the business-logic layer are
    /// resolved against this path.
    pub cwd: PathBuf,

    /// Definition for MCP servers that Codex can reach out to for tool calls.
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// Combined provider map (defaults merged with user-defined overrides).
    pub model_providers: HashMap<String, ModelProviderInfo>,

    /// Maximum number of bytes to include from an AGENTS.md project doc file.
    pub project_doc_max_bytes: usize,

    /// Directory containing all Codex state (defaults to `~/.codex` but can be
    /// overridden by the `CODEX_HOME` environment variable).
    pub codex_home: PathBuf,

    /// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
    pub history: History,

    /// Optional URI-based file opener. If set, citations to files in the model
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: UriBasedFileOpener,

    /// Collection of settings that are specific to the TUI.
    pub tui: Tui,

    /// Path to the `codex-linux-sandbox` executable. This must be set if
    /// [`crate::exec::SandboxType::LinuxSeccomp`] is used. Note that this
    /// cannot be set in the config file: it must be set in code via
    /// [`ConfigOverrides`].
    ///
    /// When this program is invoked, arg0 will be set to `codex-linux-sandbox`.
    pub codex_linux_sandbox_exe: Option<PathBuf>,

    /// If not "none", the value to use for `reasoning.effort` when making a
    /// request using the Responses API.
    pub model_reasoning_effort: ReasoningEffort,

    /// If not "none", the value to use for `reasoning.summary` when making a
    /// request using the Responses API.
    pub model_reasoning_summary: ReasoningSummary,
}

impl Config {
    /// Load configuration with *generic* CLI overrides (`-c key=value`)
    /// applied **in between** the values parsed from `config.toml` and the
    /// strongly-typed overrides specified via [`ConfigOverrides`].
    ///
    /// The precedence order is therefore: `config.toml` < `-c` overrides <
    /// `ConfigOverrides`.
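    ///
    /// For example (hypothetical invocation), `codex -c model="o3"` takes
    /// precedence over a `model` entry in `config.toml`, but yields to an
    /// explicit `ConfigOverrides { model: Some(..), .. }` supplied in code.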
    pub fn load_with_cli_overrides(
        cli_overrides: Vec<(String, TomlValue)>,
        overrides: ConfigOverrides,
    ) -> std::io::Result<Self> {
        // Resolve the directory that stores Codex state (e.g. ~/.codex or the
        // value of $CODEX_HOME) so we can embed it into the resulting
        // `Config` instance.
        let codex_home = find_codex_home()?;

        // Step 1: parse `config.toml` into a generic TOML value.
        let mut root_value = load_config_as_toml(&codex_home)?;

        // Step 2: apply the `-c` overrides.
        for (path, value) in cli_overrides.into_iter() {
            apply_toml_override(&mut root_value, &path, value);
        }

        // Step 3: deserialize into `ConfigToml` so that Serde can enforce the
        // correct types.
        let cfg: ConfigToml = root_value.try_into().map_err(|e| {
            tracing::error!("Failed to deserialize overridden config: {e}");
            std::io::Error::new(std::io::ErrorKind::InvalidData, e)
        })?;

        // Step 4: merge with the strongly-typed overrides.
        Self::load_from_base_config_with_overrides(cfg, overrides, codex_home)
    }
}

/// Read `CODEX_HOME/config.toml` and return it as a generic TOML value. Returns
/// an empty TOML table when the file does not exist.
fn load_config_as_toml(codex_home: &Path) -> std::io::Result<TomlValue> {
    let config_path = codex_home.join("config.toml");
    match std::fs::read_to_string(&config_path) {
        Ok(contents) => match toml::from_str::<TomlValue>(&contents) {
            Ok(val) => Ok(val),
            Err(e) => {
                tracing::error!("Failed to parse config.toml: {e}");
                Err(std::io::Error::new(std::io::ErrorKind::InvalidData, e))
            }
        },
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            tracing::info!("config.toml not found, using defaults");
            Ok(TomlValue::Table(Default::default()))
        }
        Err(e) => {
            tracing::error!("Failed to read config.toml: {e}");
            Err(e)
        }
    }
}

/// Apply a single dotted-path override onto a TOML value.
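///
/// For example (illustrative key and value), applying the dotted path
/// `mcp_servers.docs.command` with value `"doc-server"` to an empty document
/// produces the nested tables `[mcp_servers.docs]` with a single `command`
/// entry; see the sketch test near the bottom of this file.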
fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
    use toml::value::Table;

    let segments: Vec<&str> = path.split('.').collect();
    let mut current = root;

    for (idx, segment) in segments.iter().enumerate() {
        let is_last = idx == segments.len() - 1;

        if is_last {
            match current {
                TomlValue::Table(table) => {
                    table.insert(segment.to_string(), value);
                }
                _ => {
                    let mut table = Table::new();
                    table.insert(segment.to_string(), value);
                    *current = TomlValue::Table(table);
                }
            }
            return;
        }

        // Traverse or create the intermediate table.
        match current {
            TomlValue::Table(table) => {
                current = table
                    .entry(segment.to_string())
                    .or_insert_with(|| TomlValue::Table(Table::new()));
            }
            _ => {
                *current = TomlValue::Table(Table::new());
                if let TomlValue::Table(tbl) = current {
                    current = tbl
                        .entry(segment.to_string())
                        .or_insert_with(|| TomlValue::Table(Table::new()));
                }
            }
        }
    }
}

/// Base config deserialized from ~/.codex/config.toml.
#[derive(Deserialize, Debug, Clone, Default)]
pub struct ConfigToml {
    /// Optional override of model selection.
    pub model: Option<String>,

    /// Provider to use from the model_providers map.
    pub model_provider: Option<String>,

    /// Default approval policy for executing commands.
    pub approval_policy: Option<AskForApproval>,

    #[serde(default)]
    pub shell_environment_policy: ShellEnvironmentPolicyToml,

    /// If omitted, Codex defaults to the restrictive `read-only` policy.
    pub sandbox: Option<SandboxPolicy>,

    /// Disable server-side response storage (sends the full conversation
    /// context with every request). Currently necessary for OpenAI customers
    /// who have opted into Zero Data Retention (ZDR).
    pub disable_response_storage: Option<bool>,

    /// Optional external command to spawn for end-user notifications.
    #[serde(default)]
    pub notify: Option<Vec<String>>,

    /// System instructions.
    pub instructions: Option<String>,

    /// Definition for MCP servers that Codex can reach out to for tool calls.
    #[serde(default)]
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// User-defined provider entries that extend the built-in list. Entries
    /// whose key collides with a built-in provider are ignored (built-ins
    /// win; see `load_from_base_config_with_overrides`).
    #[serde(default)]
    pub model_providers: HashMap<String, ModelProviderInfo>,

    /// Maximum number of bytes to include from an AGENTS.md project doc file.
    pub project_doc_max_bytes: Option<usize>,

    /// Profile to use from the `profiles` map.
    pub profile: Option<String>,

    /// Named profiles to facilitate switching between different configurations.
    #[serde(default)]
    pub profiles: HashMap<String, ConfigProfile>,

    /// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
    #[serde(default)]
    pub history: Option<History>,

    /// Optional URI-based file opener. If set, citations to files in the model
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: Option<UriBasedFileOpener>,

    /// Collection of settings that are specific to the TUI.
    pub tui: Option<Tui>,

    /// When set to `true`, `AgentReasoning` events will be hidden from the
    /// UI/output. Defaults to `false`.
    pub hide_agent_reasoning: Option<bool>,

    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
}

/// Optional overrides for user configuration (e.g., from CLI flags).
#[derive(Default, Debug, Clone)]
pub struct ConfigOverrides {
    pub model: Option<String>,
    pub cwd: Option<PathBuf>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_policy: Option<SandboxPolicy>,
    pub model_provider: Option<String>,
    pub config_profile: Option<String>,
    pub codex_linux_sandbox_exe: Option<PathBuf>,
}

impl Config {
    /// Meant to be used exclusively for tests: `load_with_cli_overrides()`
    /// should be used in all other cases.
    pub fn load_from_base_config_with_overrides(
        cfg: ConfigToml,
        overrides: ConfigOverrides,
        codex_home: PathBuf,
    ) -> std::io::Result<Self> {
        let instructions = Self::load_instructions(Some(&codex_home));

        // Destructure ConfigOverrides fully to ensure all overrides are applied.
        let ConfigOverrides {
            model,
            cwd,
            approval_policy,
            sandbox_policy,
            model_provider,
            config_profile: config_profile_key,
            codex_linux_sandbox_exe,
        } = overrides;

        let config_profile = match config_profile_key.or(cfg.profile) {
            Some(key) => cfg
                .profiles
                .get(&key)
                .ok_or_else(|| {
                    std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        format!("config profile `{key}` not found"),
                    )
                })?
                .clone(),
            None => ConfigProfile::default(),
        };

        let sandbox_policy = sandbox_policy.unwrap_or_else(|| {
            cfg.sandbox
                .unwrap_or_else(SandboxPolicy::new_read_only_policy)
        });
        let mut model_providers = built_in_model_providers();
        // Merge user-defined providers into the built-in list. `or_insert`
        // means a built-in provider wins when a user entry reuses its key.
        for (key, provider) in cfg.model_providers.into_iter() {
            model_providers.entry(key).or_insert(provider);
        }
        let model_provider_id = model_provider
            .or(config_profile.model_provider)
            .or(cfg.model_provider)
            .unwrap_or_else(|| "openai".to_string());
        let model_provider = model_providers
            .get(&model_provider_id)
            .ok_or_else(|| {
                std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    format!("Model provider `{model_provider_id}` not found"),
                )
            })?
            .clone();

        let shell_environment_policy = cfg.shell_environment_policy.into();

        let resolved_cwd = {
            use std::env;

            match cwd {
                None => {
                    tracing::info!("cwd not set, using current dir");
                    env::current_dir()?
                }
                Some(p) if p.is_absolute() => p,
                Some(p) => {
                    // Resolve relative path against the current working directory.
                    tracing::info!("cwd is relative, resolving against current dir");
                    let mut current = env::current_dir()?;
                    current.push(p);
                    current
                }
            }
        };

        let history = cfg.history.unwrap_or_default();

        let config = Self {
            model: model
                .or(config_profile.model)
                .or(cfg.model)
                .unwrap_or_else(default_model),
            model_provider_id,
            model_provider,
            cwd: resolved_cwd,
            approval_policy: approval_policy
                .or(config_profile.approval_policy)
                .or(cfg.approval_policy)
                .unwrap_or_else(AskForApproval::default),
            sandbox_policy,
            shell_environment_policy,
            disable_response_storage: config_profile
                .disable_response_storage
                .or(cfg.disable_response_storage)
                .unwrap_or(false),
            notify: cfg.notify,
            instructions,
            mcp_servers: cfg.mcp_servers,
            model_providers,
            project_doc_max_bytes: cfg.project_doc_max_bytes.unwrap_or(PROJECT_DOC_MAX_BYTES),
            codex_home,
            history,
            file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode),
            tui: cfg.tui.unwrap_or_default(),
            codex_linux_sandbox_exe,
            hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false),
            model_reasoning_effort: cfg.model_reasoning_effort.unwrap_or_default(),
            model_reasoning_summary: cfg.model_reasoning_summary.unwrap_or_default(),
        };

        Ok(config)
    }

    fn load_instructions(codex_dir: Option<&Path>) -> Option<String> {
        let mut p = match codex_dir {
            Some(p) => p.to_path_buf(),
            None => return None,
        };
        p.push("instructions.md");
        std::fs::read_to_string(&p).ok().and_then(|s| {
            let s = s.trim();
            if s.is_empty() {
                None
            } else {
                Some(s.to_string())
            }
        })
    }
}

fn default_model() -> String {
    OPENAI_DEFAULT_MODEL.to_string()
}

/// Returns the path to the Codex configuration directory, which can be
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
/// `~/.codex`.
///
/// - If `CODEX_HOME` is set, the value will be canonicalized and this
///   function will Err if the path does not exist.
/// - If `CODEX_HOME` is not set, this function does not verify that the
///   directory exists.
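///
/// For example (hypothetical invocation), running with
/// `CODEX_HOME=/tmp/codex-home` makes Codex read `config.toml` from, and keep
/// state such as `history.jsonl` under, `/tmp/codex-home`.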
fn find_codex_home() -> std::io::Result<PathBuf> {
    // Honor the `CODEX_HOME` environment variable when it is set to allow users
    // (and tests) to override the default location.
    if let Ok(val) = std::env::var("CODEX_HOME") {
        if !val.is_empty() {
            return PathBuf::from(val).canonicalize();
        }
    }

    let mut p = home_dir().ok_or_else(|| {
        std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "Could not find home directory",
        )
    })?;
    p.push(".codex");
    Ok(p)
}

/// Returns the path to the folder where Codex logs are stored. Does not verify
/// that the directory exists.
pub fn log_dir(cfg: &Config) -> std::io::Result<PathBuf> {
    let mut p = cfg.codex_home.clone();
    p.push("log");
    Ok(p)
}

#[cfg(test)]
mod tests {
    #![allow(clippy::expect_used, clippy::unwrap_used)]

    use crate::config_types::HistoryPersistence;

    use super::*;
    use pretty_assertions::assert_eq;
    use tempfile::TempDir;

    #[test]
    fn test_toml_parsing() {
        let history_with_persistence = r#"
[history]
persistence = "save-all"
"#;
        let history_with_persistence_cfg = toml::from_str::<ConfigToml>(history_with_persistence)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(History {
                persistence: HistoryPersistence::SaveAll,
                max_bytes: None,
            }),
            history_with_persistence_cfg.history
        );

        let history_no_persistence = r#"
[history]
persistence = "none"
"#;
        let history_no_persistence_cfg = toml::from_str::<ConfigToml>(history_no_persistence)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(History {
                persistence: HistoryPersistence::None,
                max_bytes: None,
            }),
            history_no_persistence_cfg.history
        );
    }

    #[test]
    fn test_sandbox_config_parsing() {
        let sandbox_full_access = r#"
[sandbox]
mode = "danger-full-access"
network_access = false # This should be ignored.
"#;
        let sandbox_full_access_cfg = toml::from_str::<ConfigToml>(sandbox_full_access)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(SandboxPolicy::DangerFullAccess),
            sandbox_full_access_cfg.sandbox
        );

        let sandbox_read_only = r#"
[sandbox]
mode = "read-only"
network_access = true # This should be ignored.
"#;
        let sandbox_read_only_cfg = toml::from_str::<ConfigToml>(sandbox_read_only)
            .expect("TOML deserialization should succeed");
        assert_eq!(Some(SandboxPolicy::ReadOnly), sandbox_read_only_cfg.sandbox);

        let sandbox_workspace_write = r#"
[sandbox]
mode = "workspace-write"
writable_roots = [
    "/tmp",
]
"#;
        let sandbox_workspace_write_cfg = toml::from_str::<ConfigToml>(sandbox_workspace_write)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(SandboxPolicy::WorkspaceWrite {
                writable_roots: vec![PathBuf::from("/tmp")],
                network_access: false,
            }),
            sandbox_workspace_write_cfg.sandbox
        );
    }
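
    // Illustrative sketch, not part of the original test suite: exercises the
    // dotted-path semantics of `apply_toml_override` documented above. The
    // key names and value used here are hypothetical.
    #[test]
    fn test_apply_toml_override_creates_nested_tables() {
        let mut root = TomlValue::Table(Default::default());
        apply_toml_override(
            &mut root,
            "mcp_servers.docs.command",
            TomlValue::String("doc-server".to_string()),
        );

        let expected: TomlValue = toml::from_str(
            r#"
[mcp_servers.docs]
command = "doc-server"
"#,
        )
        .expect("TOML deserialization should succeed");
        assert_eq!(expected, root);
    }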

    struct PrecedenceTestFixture {
        cwd: TempDir,
        codex_home: TempDir,
        cfg: ConfigToml,
        model_provider_map: HashMap<String, ModelProviderInfo>,
        openai_provider: ModelProviderInfo,
        openai_chat_completions_provider: ModelProviderInfo,
    }

    impl PrecedenceTestFixture {
        fn cwd(&self) -> PathBuf {
            self.cwd.path().to_path_buf()
        }

        fn codex_home(&self) -> PathBuf {
            self.codex_home.path().to_path_buf()
        }
    }

    fn create_test_fixture() -> std::io::Result<PrecedenceTestFixture> {
        let toml = r#"
model = "o3"
approval_policy = "untrusted"
disable_response_storage = false

# Can be used to determine which profile to use if not specified by
# `ConfigOverrides`.
profile = "gpt3"

[model_providers.openai-chat-completions]
name = "OpenAI using Chat Completions"
base_url = "https://api.openai.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"

[profiles.o3]
model = "o3"
model_provider = "openai"
approval_policy = "never"

[profiles.gpt3]
model = "gpt-3.5-turbo"
model_provider = "openai-chat-completions"

[profiles.zdr]
model = "o3"
model_provider = "openai"
approval_policy = "on-failure"
disable_response_storage = true
"#;
        let cfg: ConfigToml = toml::from_str(toml).expect("TOML deserialization should succeed");

        // Use a temporary directory for the cwd so it does not contain an
        // AGENTS.md file.
        let cwd_temp_dir = TempDir::new().unwrap();
        let cwd = cwd_temp_dir.path().to_path_buf();

        // Make it look like a Git repo so it does not search for AGENTS.md in
        // a parent folder, either.
        std::fs::write(cwd.join(".git"), "gitdir: nowhere")?;

        let codex_home_temp_dir = TempDir::new().unwrap();

        let openai_chat_completions_provider = ModelProviderInfo {
            name: "OpenAI using Chat Completions".to_string(),
            base_url: "https://api.openai.com/v1".to_string(),
            env_key: Some("OPENAI_API_KEY".to_string()),
            wire_api: crate::WireApi::Chat,
            env_key_instructions: None,
        };
        let model_provider_map = {
            let mut model_provider_map = built_in_model_providers();
            model_provider_map.insert(
                "openai-chat-completions".to_string(),
                openai_chat_completions_provider.clone(),
            );
            model_provider_map
        };

        let openai_provider = model_provider_map
            .get("openai")
            .expect("openai provider should exist")
            .clone();

        Ok(PrecedenceTestFixture {
            cwd: cwd_temp_dir,
            codex_home: codex_home_temp_dir,
            cfg,
            model_provider_map,
            openai_provider,
            openai_chat_completions_provider,
        })
    }

    /// Users can specify config values at multiple levels that have the
    /// following precedence:
    ///
    /// 1. custom command-line argument, e.g. `--model o3`
    /// 2. as part of a profile, where the `--profile` is specified via a CLI
    ///    (or in the config file itself)
    /// 3. as an entry in `config.toml`, e.g. `model = "o3"`
    /// 4. the default value for a required field defined in code, e.g.,
    ///    `crate::flags::OPENAI_DEFAULT_MODEL`
    ///
    /// Note that profiles are the recommended way to specify a group of
    /// configuration options together.
    #[test]
    fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
        let fixture = create_test_fixture()?;

        let o3_profile_overrides = ConfigOverrides {
            config_profile: Some("o3".to_string()),
            cwd: Some(fixture.cwd()),
            ..Default::default()
        };
        let o3_profile_config: Config = Config::load_from_base_config_with_overrides(
            fixture.cfg.clone(),
            o3_profile_overrides,
            fixture.codex_home(),
        )?;
        assert_eq!(
            Config {
                model: "o3".to_string(),
                model_provider_id: "openai".to_string(),
                model_provider: fixture.openai_provider.clone(),
                approval_policy: AskForApproval::Never,
                sandbox_policy: SandboxPolicy::new_read_only_policy(),
                shell_environment_policy: ShellEnvironmentPolicy::default(),
                disable_response_storage: false,
                instructions: None,
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                codex_home: fixture.codex_home(),
                history: History::default(),
                file_opener: UriBasedFileOpener::VsCode,
                tui: Tui::default(),
                codex_linux_sandbox_exe: None,
                hide_agent_reasoning: false,
                model_reasoning_effort: ReasoningEffort::default(),
                model_reasoning_summary: ReasoningSummary::default(),
            },
            o3_profile_config
        );
        Ok(())
    }

    #[test]
    fn test_precedence_fixture_with_gpt3_profile() -> std::io::Result<()> {
        let fixture = create_test_fixture()?;

        let gpt3_profile_overrides = ConfigOverrides {
            config_profile: Some("gpt3".to_string()),
            cwd: Some(fixture.cwd()),
            ..Default::default()
        };
        let gpt3_profile_config = Config::load_from_base_config_with_overrides(
            fixture.cfg.clone(),
            gpt3_profile_overrides,
            fixture.codex_home(),
        )?;
        let expected_gpt3_profile_config = Config {
            model: "gpt-3.5-turbo".to_string(),
            model_provider_id: "openai-chat-completions".to_string(),
            model_provider: fixture.openai_chat_completions_provider.clone(),
            approval_policy: AskForApproval::UnlessTrusted,
            sandbox_policy: SandboxPolicy::new_read_only_policy(),
            shell_environment_policy: ShellEnvironmentPolicy::default(),
            disable_response_storage: false,
            instructions: None,
            notify: None,
            cwd: fixture.cwd(),
            mcp_servers: HashMap::new(),
            model_providers: fixture.model_provider_map.clone(),
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            model_reasoning_effort: ReasoningEffort::default(),
            model_reasoning_summary: ReasoningSummary::default(),
        };
        assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);

        // Verify that loading without specifying a profile in ConfigOverrides
        // uses the default profile from the config file (which is "gpt3").
        let default_profile_overrides = ConfigOverrides {
            cwd: Some(fixture.cwd()),
            ..Default::default()
        };
        let default_profile_config = Config::load_from_base_config_with_overrides(
            fixture.cfg.clone(),
            default_profile_overrides,
            fixture.codex_home(),
        )?;
        assert_eq!(expected_gpt3_profile_config, default_profile_config);
        Ok(())
    }

    #[test]
    fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> {
        let fixture = create_test_fixture()?;

        let zdr_profile_overrides = ConfigOverrides {
            config_profile: Some("zdr".to_string()),
            cwd: Some(fixture.cwd()),
            ..Default::default()
        };
        let zdr_profile_config = Config::load_from_base_config_with_overrides(
            fixture.cfg.clone(),
            zdr_profile_overrides,
            fixture.codex_home(),
        )?;
        let expected_zdr_profile_config = Config {
            model: "o3".to_string(),
            model_provider_id: "openai".to_string(),
            model_provider: fixture.openai_provider.clone(),
            approval_policy: AskForApproval::OnFailure,
            sandbox_policy: SandboxPolicy::new_read_only_policy(),
            shell_environment_policy: ShellEnvironmentPolicy::default(),
            disable_response_storage: true,
            instructions: None,
            notify: None,
            cwd: fixture.cwd(),
            mcp_servers: HashMap::new(),
            model_providers: fixture.model_provider_map.clone(),
            project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            model_reasoning_effort: ReasoningEffort::default(),
            model_reasoning_summary: ReasoningSummary::default(),
        };
        assert_eq!(expected_zdr_profile_config, zdr_profile_config);
        Ok(())
    }
}