use crate::config_profile::ConfigProfile;
use crate::config_types::History;
use crate::config_types::McpServerConfig;
use crate::config_types::ReasoningEffort;
use crate::config_types::ReasoningSummary;
use crate::config_types::SandboxMode;
use crate::config_types::SandboxWorkplaceWrite;
use crate::config_types::ShellEnvironmentPolicy;
use crate::config_types::ShellEnvironmentPolicyToml;
use crate::config_types::Tui;
use crate::config_types::UriBasedFileOpener;
use crate::flags::OPENAI_DEFAULT_MODEL;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use dirs::home_dir;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;

/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB

/// Application configuration loaded from disk and merged with overrides.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {
    /// Optional override of model selection.
    pub model: String,

    /// Size of the context window for the model, in tokens.
    pub model_context_window: Option<u64>,

    /// Maximum number of output tokens.
    pub model_max_output_tokens: Option<u64>,

    /// Key into the model_providers map that specifies which provider to use.
    pub model_provider_id: String,

    /// Info needed to make an API request to the model.
    pub model_provider: ModelProviderInfo,

    /// Approval policy for executing commands.
    pub approval_policy: AskForApproval,

    pub sandbox_policy: SandboxPolicy,

    pub shell_environment_policy: ShellEnvironmentPolicy,

    /// When `true`, `AgentReasoning` events emitted by the backend will be
    /// suppressed from the frontend output. This can reduce visual noise when
    /// users are only interested in the final agent responses.
    pub hide_agent_reasoning: bool,

    /// Disable server-side response storage (sends the full conversation
    /// context with every request). Currently necessary for OpenAI customers
    /// who have opted into Zero Data Retention (ZDR).
    pub disable_response_storage: bool,

    /// User-provided instructions from instructions.md.
    pub instructions: Option<String>,

    /// Optional external notifier command. When set, Codex will spawn this
    /// program after each completed *turn* (i.e. when the agent finishes
    /// processing a user submission). The value must be the full command
    /// broken into argv tokens **without** the trailing JSON argument - Codex
    /// appends one extra argument containing a JSON payload describing the
    /// event.
    ///
    /// Example `~/.codex/config.toml` snippet:
    ///
    /// ```toml
    /// notify = ["notify-send", "Codex"]
    /// ```
    ///
    /// which will be invoked as:
    ///
    /// ```shell
    /// notify-send Codex '{"type":"agent-turn-complete","turn-id":"12345"}'
    /// ```
    ///
    /// If unset the feature is disabled.
    pub notify: Option<Vec<String>>,

    /// The directory that should be treated as the current working directory
    /// for the session. All relative paths inside the business-logic layer are
    /// resolved against this path.
    pub cwd: PathBuf,

    /// Definition for MCP servers that Codex can reach out to for tool calls.
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// Combined provider map (defaults merged with user-defined overrides).
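    ///
    /// For illustration, a user-defined entry can be declared in
    /// `config.toml` like the snippet below (mirrors the provider used in the
    /// test fixture at the bottom of this file):
    ///
    /// ```toml
    /// [model_providers.openai-chat-completions]
    /// name = "OpenAI using Chat Completions"
    /// base_url = "https://api.openai.com/v1"
    /// env_key = "OPENAI_API_KEY"
    /// wire_api = "chat"
    /// ```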
    pub model_providers: HashMap<String, ModelProviderInfo>,

    /// Maximum number of bytes to include from an AGENTS.md project doc file.
    pub project_doc_max_bytes: usize,

    /// Directory containing all Codex state (defaults to `~/.codex` but can be
    /// overridden by the `CODEX_HOME` environment variable).
    pub codex_home: PathBuf,

    /// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
    pub history: History,

    /// Optional URI-based file opener. If set, citations to files in the model
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: UriBasedFileOpener,

    /// Collection of settings that are specific to the TUI.
    pub tui: Tui,

    /// Path to the `codex-linux-sandbox` executable. This must be set if
    /// [`crate::exec::SandboxType::LinuxSeccomp`] is used. Note that this
    /// cannot be set in the config file: it must be set in code via
    /// [`ConfigOverrides`].
    ///
    /// When this program is invoked, arg0 will be set to `codex-linux-sandbox`.
    pub codex_linux_sandbox_exe: Option<PathBuf>,

    /// If not "none", the value to use for `reasoning.effort` when making a
    /// request using the Responses API.
    pub model_reasoning_effort: ReasoningEffort,

    /// If not "none", the value to use for `reasoning.summary` when making a
    /// request using the Responses API.
    pub model_reasoning_summary: ReasoningSummary,
}

impl Config {
    /// Load configuration with *generic* CLI overrides (`-c key=value`) applied
    /// **in between** the values parsed from `config.toml` and the
    /// strongly-typed overrides specified via [`ConfigOverrides`].
    ///
    /// The precedence order is therefore: `config.toml` < `-c` overrides <
    /// `ConfigOverrides`.
    pub fn load_with_cli_overrides(
        cli_overrides: Vec<(String, TomlValue)>,
        overrides: ConfigOverrides,
    ) -> std::io::Result<Self> {
        // Resolve the directory that stores Codex state (e.g. ~/.codex or the
        // value of $CODEX_HOME) so we can embed it into the resulting
        // `Config` instance.
        let codex_home = find_codex_home()?;

        // Step 1: parse `config.toml` into a generic TOML value.
        let mut root_value = load_config_as_toml(&codex_home)?;

        // Step 2: apply the `-c` overrides.
        for (path, value) in cli_overrides.into_iter() {
            apply_toml_override(&mut root_value, &path, value);
        }

        // Step 3: deserialize into `ConfigToml` so that Serde can enforce the
        // correct types.
        let cfg: ConfigToml = root_value.try_into().map_err(|e| {
            tracing::error!("Failed to deserialize overridden config: {e}");
            std::io::Error::new(std::io::ErrorKind::InvalidData, e)
        })?;

        // Step 4: merge with the strongly-typed overrides.
        Self::load_from_base_config_with_overrides(cfg, overrides, codex_home)
    }
}

/// Read `CODEX_HOME/config.toml` and return it as a generic TOML value. Returns
/// an empty TOML table when the file does not exist.
fn load_config_as_toml(codex_home: &Path) -> std::io::Result<TomlValue> {
    let config_path = codex_home.join("config.toml");
    match std::fs::read_to_string(&config_path) {
        Ok(contents) => match toml::from_str::<TomlValue>(&contents) {
            Ok(val) => Ok(val),
            Err(e) => {
                tracing::error!("Failed to parse config.toml: {e}");
                Err(std::io::Error::new(std::io::ErrorKind::InvalidData, e))
            }
        },
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            tracing::info!("config.toml not found, using defaults");
            Ok(TomlValue::Table(Default::default()))
        }
        Err(e) => {
            tracing::error!("Failed to read config.toml: {e}");
            Err(e)
        }
    }
}

/// Apply a single dotted-path override onto a TOML value.
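///
/// A sketch of the intended behaviour (assuming the standard `toml` crate
/// table representation): the path `model_providers.openai.wire_api` with the
/// value `"chat"` creates or traverses the intermediate tables so the result
/// is equivalent to:
///
/// ```toml
/// [model_providers.openai]
/// wire_api = "chat"
/// ```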
fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
    use toml::value::Table;

    let segments: Vec<&str> = path.split('.').collect();
    let mut current = root;

    for (idx, segment) in segments.iter().enumerate() {
        let is_last = idx == segments.len() - 1;

        if is_last {
            match current {
                TomlValue::Table(table) => {
                    table.insert(segment.to_string(), value);
                }
                _ => {
                    let mut table = Table::new();
                    table.insert(segment.to_string(), value);
                    *current = TomlValue::Table(table);
                }
            }
            return;
        }

        // Traverse or create intermediate table.
        match current {
            TomlValue::Table(table) => {
                current = table
                    .entry(segment.to_string())
                    .or_insert_with(|| TomlValue::Table(Table::new()));
            }
            _ => {
                *current = TomlValue::Table(Table::new());
                if let TomlValue::Table(tbl) = current {
                    current = tbl
                        .entry(segment.to_string())
                        .or_insert_with(|| TomlValue::Table(Table::new()));
                }
            }
        }
    }
}

/// Base config deserialized from ~/.codex/config.toml.
#[derive(Deserialize, Debug, Clone, Default)]
pub struct ConfigToml {
    /// Optional override of model selection.
    pub model: Option<String>,

    /// Provider to use from the model_providers map.
    pub model_provider: Option<String>,

    /// Size of the context window for the model, in tokens.
    pub model_context_window: Option<u64>,

    /// Maximum number of output tokens.
    pub model_max_output_tokens: Option<u64>,

    /// Default approval policy for executing commands.
    pub approval_policy: Option<AskForApproval>,

    #[serde(default)]
    pub shell_environment_policy: ShellEnvironmentPolicyToml,

    /// Sandbox mode to use.
    pub sandbox_mode: Option<SandboxMode>,

    /// Sandbox configuration to apply if `sandbox` is `WorkspaceWrite`.
    pub sandbox_workspace_write: Option<SandboxWorkplaceWrite>,

    /// Disable server-side response storage (sends the full conversation
    /// context with every request). Currently necessary for OpenAI customers
    /// who have opted into Zero Data Retention (ZDR).
    pub disable_response_storage: Option<bool>,

    /// Optional external command to spawn for end-user notifications.
    #[serde(default)]
    pub notify: Option<Vec<String>>,

    /// System instructions.
    pub instructions: Option<String>,

    /// Definition for MCP servers that Codex can reach out to for tool calls.
    #[serde(default)]
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// User-defined provider entries that extend/override the built-in list.
    #[serde(default)]
    pub model_providers: HashMap<String, ModelProviderInfo>,

    /// Maximum number of bytes to include from an AGENTS.md project doc file.
    pub project_doc_max_bytes: Option<usize>,

    /// Profile to use from the `profiles` map.
    pub profile: Option<String>,

    /// Named profiles to facilitate switching between different configurations.
    #[serde(default)]
    pub profiles: HashMap<String, ConfigProfile>,

    /// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
    #[serde(default)]
    pub history: Option<History>,

    /// Optional URI-based file opener. If set, citations to files in the model
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: Option<UriBasedFileOpener>,

    /// Collection of settings that are specific to the TUI.
    pub tui: Option<Tui>,

    /// When set to `true`, `AgentReasoning` events will be hidden from the
    /// UI/output. Defaults to `false`.
    pub hide_agent_reasoning: Option<bool>,

    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
}

impl ConfigToml {
    /// Derive the effective sandbox policy from the configuration.
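    ///
    /// For example, the following `config.toml` snippet (the same shape the
    /// tests below exercise) resolves to `SandboxPolicy::WorkspaceWrite` with
    /// `/tmp` as a writable root and network access disabled:
    ///
    /// ```toml
    /// sandbox_mode = "workspace-write"
    ///
    /// [sandbox_workspace_write]
    /// writable_roots = ["/tmp"]
    /// network_access = false
    /// ```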
    fn derive_sandbox_policy(&self, sandbox_mode_override: Option<SandboxMode>) -> SandboxPolicy {
        let resolved_sandbox_mode = sandbox_mode_override
            .or(self.sandbox_mode)
            .unwrap_or_default();
        match resolved_sandbox_mode {
            SandboxMode::ReadOnly => SandboxPolicy::new_read_only_policy(),
            SandboxMode::WorkspaceWrite => match self.sandbox_workspace_write.as_ref() {
                Some(s) => SandboxPolicy::WorkspaceWrite {
                    writable_roots: s.writable_roots.clone(),
                    network_access: s.network_access,
                },
                None => SandboxPolicy::new_workspace_write_policy(),
            },
            SandboxMode::DangerFullAccess => SandboxPolicy::DangerFullAccess,
        }
    }
}

/// Optional overrides for user configuration (e.g., from CLI flags).
#[derive(Default, Debug, Clone)]
pub struct ConfigOverrides {
    pub model: Option<String>,
    pub cwd: Option<PathBuf>,
    pub approval_policy: Option<AskForApproval>,
    pub sandbox_mode: Option<SandboxMode>,
    pub model_provider: Option<String>,
    pub config_profile: Option<String>,
    pub codex_linux_sandbox_exe: Option<PathBuf>,
}

impl Config {
    /// Meant to be used exclusively for tests: `load_with_cli_overrides()` should
    /// be used in all other cases.
    pub fn load_from_base_config_with_overrides(
        cfg: ConfigToml,
        overrides: ConfigOverrides,
        codex_home: PathBuf,
    ) -> std::io::Result<Self> {
        let instructions = Self::load_instructions(Some(&codex_home));

        // Destructure ConfigOverrides fully to ensure all overrides are applied.
        let ConfigOverrides {
            model,
            cwd,
            approval_policy,
            sandbox_mode,
            model_provider,
            config_profile: config_profile_key,
            codex_linux_sandbox_exe,
        } = overrides;

        let config_profile = match config_profile_key.as_ref().or(cfg.profile.as_ref()) {
            Some(key) => cfg
                .profiles
                .get(key)
                .ok_or_else(|| {
                    std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        format!("config profile `{key}` not found"),
                    )
                })?
                .clone(),
            None => ConfigProfile::default(),
        };

        let sandbox_policy = cfg.derive_sandbox_policy(sandbox_mode);

        let mut model_providers = built_in_model_providers();
        // Merge user-defined providers into the built-in list.
        for (key, provider) in cfg.model_providers.into_iter() {
            model_providers.entry(key).or_insert(provider);
        }

        let model_provider_id = model_provider
            .or(config_profile.model_provider)
            .or(cfg.model_provider)
            .unwrap_or_else(|| "openai".to_string());
        let model_provider = model_providers
            .get(&model_provider_id)
            .ok_or_else(|| {
                std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    format!("Model provider `{model_provider_id}` not found"),
                )
            })?
            .clone();

        let shell_environment_policy = cfg.shell_environment_policy.into();

        let resolved_cwd = {
            use std::env;

            match cwd {
                None => {
                    tracing::info!("cwd not set, using current dir");
                    env::current_dir()?
                }
                Some(p) if p.is_absolute() => p,
                Some(p) => {
                    // Resolve relative path against the current working directory.
tracing::info!("cwd is relative, resolving against current dir"); let mut current = env::current_dir()?; current.push(p); current } } }; let history = cfg.history.unwrap_or_default(); let model = model .or(config_profile.model) .or(cfg.model) .unwrap_or_else(default_model); let openai_model_info = get_model_info(&model); let model_context_window = cfg .model_context_window .or_else(|| openai_model_info.as_ref().map(|info| info.context_window)); let model_max_output_tokens = cfg.model_max_output_tokens.or_else(|| { openai_model_info .as_ref() .map(|info| info.max_output_tokens) }); let config = Self { model, model_context_window, model_max_output_tokens, model_provider_id, model_provider, cwd: resolved_cwd, approval_policy: approval_policy .or(config_profile.approval_policy) .or(cfg.approval_policy) .unwrap_or_else(AskForApproval::default), sandbox_policy, shell_environment_policy, disable_response_storage: config_profile .disable_response_storage .or(cfg.disable_response_storage) .unwrap_or(false), notify: cfg.notify, instructions, mcp_servers: cfg.mcp_servers, model_providers, project_doc_max_bytes: cfg.project_doc_max_bytes.unwrap_or(PROJECT_DOC_MAX_BYTES), codex_home, history, file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode), tui: cfg.tui.unwrap_or_default(), codex_linux_sandbox_exe, hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false), model_reasoning_effort: cfg.model_reasoning_effort.unwrap_or_default(), model_reasoning_summary: cfg.model_reasoning_summary.unwrap_or_default(), }; Ok(config) } fn load_instructions(codex_dir: Option<&Path>) -> Option { let mut p = match codex_dir { Some(p) => p.to_path_buf(), None => return None, }; p.push("instructions.md"); std::fs::read_to_string(&p).ok().and_then(|s| { let s = s.trim(); if s.is_empty() { None } else { Some(s.to_string()) } }) } } fn default_model() -> String { OPENAI_DEFAULT_MODEL.to_string() } /// Returns the path to the Codex configuration directory, which can be /// specified by the `CODEX_HOME` environment variable. If not set, defaults to /// `~/.codex`. /// /// - If `CODEX_HOME` is set, the value will be canonicalized and this /// function will Err if the path does not exist. /// - If `CODEX_HOME` is not set, this function does not verify that the /// directory exists. fn find_codex_home() -> std::io::Result { // Honor the `CODEX_HOME` environment variable when it is set to allow users // (and tests) to override the default location. if let Ok(val) = std::env::var("CODEX_HOME") { if !val.is_empty() { return PathBuf::from(val).canonicalize(); } } let mut p = home_dir().ok_or_else(|| { std::io::Error::new( std::io::ErrorKind::NotFound, "Could not find home directory", ) })?; p.push(".codex"); Ok(p) } /// Returns the path to the folder where Codex logs are stored. Does not verify /// that the directory exists. 
pub fn log_dir(cfg: &Config) -> std::io::Result<PathBuf> {
    let mut p = cfg.codex_home.clone();
    p.push("log");
    Ok(p)
}

#[cfg(test)]
mod tests {
    #![allow(clippy::expect_used, clippy::unwrap_used)]

    use crate::config_types::HistoryPersistence;

    use super::*;
    use pretty_assertions::assert_eq;
    use tempfile::TempDir;

    #[test]
    fn test_toml_parsing() {
        let history_with_persistence = r#"
[history]
persistence = "save-all"
"#;
        let history_with_persistence_cfg = toml::from_str::<ConfigToml>(history_with_persistence)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(History {
                persistence: HistoryPersistence::SaveAll,
                max_bytes: None,
            }),
            history_with_persistence_cfg.history
        );

        let history_no_persistence = r#"
[history]
persistence = "none"
"#;
        let history_no_persistence_cfg = toml::from_str::<ConfigToml>(history_no_persistence)
            .expect("TOML deserialization should succeed");
        assert_eq!(
            Some(History {
                persistence: HistoryPersistence::None,
                max_bytes: None,
            }),
            history_no_persistence_cfg.history
        );
    }

    #[test]
    fn test_sandbox_config_parsing() {
        let sandbox_full_access = r#"
sandbox_mode = "danger-full-access"

[sandbox_workspace_write]
network_access = false  # This should be ignored.
"#;
        let sandbox_full_access_cfg = toml::from_str::<ConfigToml>(sandbox_full_access)
            .expect("TOML deserialization should succeed");
        let sandbox_mode_override = None;
        assert_eq!(
            SandboxPolicy::DangerFullAccess,
            sandbox_full_access_cfg.derive_sandbox_policy(sandbox_mode_override)
        );

        let sandbox_read_only = r#"
sandbox_mode = "read-only"

[sandbox_workspace_write]
network_access = true  # This should be ignored.
"#;
        let sandbox_read_only_cfg = toml::from_str::<ConfigToml>(sandbox_read_only)
            .expect("TOML deserialization should succeed");
        let sandbox_mode_override = None;
        assert_eq!(
            SandboxPolicy::ReadOnly,
            sandbox_read_only_cfg.derive_sandbox_policy(sandbox_mode_override)
        );

        let sandbox_workspace_write = r#"
sandbox_mode = "workspace-write"

[sandbox_workspace_write]
writable_roots = [
    "/tmp",
]
"#;
        let sandbox_workspace_write_cfg = toml::from_str::<ConfigToml>(sandbox_workspace_write)
            .expect("TOML deserialization should succeed");
        let sandbox_mode_override = None;
        assert_eq!(
            SandboxPolicy::WorkspaceWrite {
                writable_roots: vec![PathBuf::from("/tmp")],
                network_access: false,
            },
            sandbox_workspace_write_cfg.derive_sandbox_policy(sandbox_mode_override)
        );
    }

    struct PrecedenceTestFixture {
        cwd: TempDir,
        codex_home: TempDir,
        cfg: ConfigToml,
        model_provider_map: HashMap<String, ModelProviderInfo>,
        openai_provider: ModelProviderInfo,
        openai_chat_completions_provider: ModelProviderInfo,
    }

    impl PrecedenceTestFixture {
        fn cwd(&self) -> PathBuf {
            self.cwd.path().to_path_buf()
        }

        fn codex_home(&self) -> PathBuf {
            self.codex_home.path().to_path_buf()
        }
    }

    fn create_test_fixture() -> std::io::Result<PrecedenceTestFixture> {
        let toml = r#"
model = "o3"
approval_policy = "untrusted"
disable_response_storage = false

# Can be used to determine which profile to use if not specified by
# `ConfigOverrides`.
profile = "gpt3"

[model_providers.openai-chat-completions]
name = "OpenAI using Chat Completions"
base_url = "https://api.openai.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"

[profiles.o3]
model = "o3"
model_provider = "openai"
approval_policy = "never"

[profiles.gpt3]
model = "gpt-3.5-turbo"
model_provider = "openai-chat-completions"

[profiles.zdr]
model = "o3"
model_provider = "openai"
approval_policy = "on-failure"
disable_response_storage = true
"#;

        let cfg: ConfigToml = toml::from_str(toml).expect("TOML deserialization should succeed");

        // Use a temporary directory for the cwd so it does not contain an
        // AGENTS.md file.
        let cwd_temp_dir = TempDir::new().unwrap();
        let cwd = cwd_temp_dir.path().to_path_buf();

        // Make it look like a Git repo so it does not search for AGENTS.md in
        // a parent folder, either.
        std::fs::write(cwd.join(".git"), "gitdir: nowhere")?;

        let codex_home_temp_dir = TempDir::new().unwrap();

        let openai_chat_completions_provider = ModelProviderInfo {
            name: "OpenAI using Chat Completions".to_string(),
            base_url: "https://api.openai.com/v1".to_string(),
            env_key: Some("OPENAI_API_KEY".to_string()),
            wire_api: crate::WireApi::Chat,
            env_key_instructions: None,
            query_params: None,
            http_headers: None,
            env_http_headers: None,
        };
        let model_provider_map = {
            let mut model_provider_map = built_in_model_providers();
            model_provider_map.insert(
                "openai-chat-completions".to_string(),
                openai_chat_completions_provider.clone(),
            );
            model_provider_map
        };

        let openai_provider = model_provider_map
            .get("openai")
            .expect("openai provider should exist")
            .clone();

        Ok(PrecedenceTestFixture {
            cwd: cwd_temp_dir,
            codex_home: codex_home_temp_dir,
            cfg,
            model_provider_map,
            openai_provider,
            openai_chat_completions_provider,
        })
    }

    /// Users can specify config values at multiple levels that have the
    /// following precedence:
    ///
    /// 1. custom command-line argument, e.g. `--model o3`
    /// 2. as part of a profile, where the `--profile` is specified via a CLI
    ///    (or in the config file itself)
    /// 3. as an entry in `config.toml`, e.g. `model = "o3"`
    /// 4. the default value for a required field defined in code, e.g.,
    ///    `crate::flags::OPENAI_DEFAULT_MODEL`
    ///
    /// Note that profiles are the recommended way to specify a group of
    /// configuration options together.
    #[test]
    fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
        let fixture = create_test_fixture()?;

        let o3_profile_overrides = ConfigOverrides {
            config_profile: Some("o3".to_string()),
            cwd: Some(fixture.cwd()),
            ..Default::default()
        };
        let o3_profile_config: Config = Config::load_from_base_config_with_overrides(
            fixture.cfg.clone(),
            o3_profile_overrides,
            fixture.codex_home(),
        )?;
        assert_eq!(
            Config {
                model: "o3".to_string(),
                model_context_window: Some(200_000),
                model_max_output_tokens: Some(100_000),
                model_provider_id: "openai".to_string(),
                model_provider: fixture.openai_provider.clone(),
                approval_policy: AskForApproval::Never,
                sandbox_policy: SandboxPolicy::new_read_only_policy(),
                shell_environment_policy: ShellEnvironmentPolicy::default(),
                disable_response_storage: false,
                instructions: None,
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                codex_home: fixture.codex_home(),
                history: History::default(),
                file_opener: UriBasedFileOpener::VsCode,
                tui: Tui::default(),
                codex_linux_sandbox_exe: None,
                hide_agent_reasoning: false,
                model_reasoning_effort: ReasoningEffort::default(),
                model_reasoning_summary: ReasoningSummary::default(),
            },
            o3_profile_config
        );
        Ok(())
    }
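
    /// Illustrative sketch: exercises `apply_toml_override` directly to show
    /// how a dotted `-c` path such as `model_providers.openai.wire_api`
    /// creates the nested tables it names.
    #[test]
    fn test_apply_toml_override_creates_nested_tables() {
        // Start from an empty top-level table, as load_config_as_toml does
        // when config.toml is missing.
        let mut root = TomlValue::Table(Default::default());
        apply_toml_override(
            &mut root,
            "model_providers.openai.wire_api",
            TomlValue::String("chat".to_string()),
        );

        // The dotted path should have produced the equivalent nested tables.
        let expected: TomlValue = toml::from_str(
            r#"
[model_providers.openai]
wire_api = "chat"
"#,
        )
        .expect("TOML deserialization should succeed");
        assert_eq!(expected, root);
    }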
"openai-chat-completions".to_string(), model_provider: fixture.openai_chat_completions_provider.clone(), approval_policy: AskForApproval::UnlessTrusted, sandbox_policy: SandboxPolicy::new_read_only_policy(), shell_environment_policy: ShellEnvironmentPolicy::default(), disable_response_storage: false, instructions: None, notify: None, cwd: fixture.cwd(), mcp_servers: HashMap::new(), model_providers: fixture.model_provider_map.clone(), project_doc_max_bytes: PROJECT_DOC_MAX_BYTES, codex_home: fixture.codex_home(), history: History::default(), file_opener: UriBasedFileOpener::VsCode, tui: Tui::default(), codex_linux_sandbox_exe: None, hide_agent_reasoning: false, model_reasoning_effort: ReasoningEffort::default(), model_reasoning_summary: ReasoningSummary::default(), }; assert_eq!(expected_gpt3_profile_config, gpt3_profile_config); // Verify that loading without specifying a profile in ConfigOverrides // uses the default profile from the config file (which is "gpt3"). let default_profile_overrides = ConfigOverrides { cwd: Some(fixture.cwd()), ..Default::default() }; let default_profile_config = Config::load_from_base_config_with_overrides( fixture.cfg.clone(), default_profile_overrides, fixture.codex_home(), )?; assert_eq!(expected_gpt3_profile_config, default_profile_config); Ok(()) } #[test] fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> { let fixture = create_test_fixture()?; let zdr_profile_overrides = ConfigOverrides { config_profile: Some("zdr".to_string()), cwd: Some(fixture.cwd()), ..Default::default() }; let zdr_profile_config = Config::load_from_base_config_with_overrides( fixture.cfg.clone(), zdr_profile_overrides, fixture.codex_home(), )?; let expected_zdr_profile_config = Config { model: "o3".to_string(), model_context_window: Some(200_000), model_max_output_tokens: Some(100_000), model_provider_id: "openai".to_string(), model_provider: fixture.openai_provider.clone(), approval_policy: AskForApproval::OnFailure, sandbox_policy: SandboxPolicy::new_read_only_policy(), shell_environment_policy: ShellEnvironmentPolicy::default(), disable_response_storage: true, instructions: None, notify: None, cwd: fixture.cwd(), mcp_servers: HashMap::new(), model_providers: fixture.model_provider_map.clone(), project_doc_max_bytes: PROJECT_DOC_MAX_BYTES, codex_home: fixture.codex_home(), history: History::default(), file_opener: UriBasedFileOpener::VsCode, tui: Tui::default(), codex_linux_sandbox_exe: None, hide_agent_reasoning: false, model_reasoning_effort: ReasoningEffort::default(), model_reasoning_summary: ReasoningSummary::default(), }; assert_eq!(expected_zdr_profile_config, zdr_profile_config); Ok(()) } }