feat: feature flag (#4948)

Add a proper feature-flag mechanism instead of having custom flags for everything.
This is only for experimental/WIP parts of the code.
Flags can be toggled through the CLI:
```bash
codex --enable unified_exec --disable view_image_tool
```

Or in `config.toml`:
```toml
# Global toggles applied to every profile unless overridden.
[features]
apply_patch_freeform = true
view_image_tool = false
```
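The `[features]` keys (and the names passed to `--enable`/`--disable`) appear to be snake_case spellings of the new `Feature` variants used throughout the diff below. A minimal sketch of that mapping, assuming the real parser lives in the new `features` module (it is not shown in this PR):

```rust
// Assumed key -> Feature mapping. Only "plan_tool", "apply_patch_freeform",
// "view_image_tool", and "unified_exec" appear verbatim in this PR; the other
// keys are guesses derived from the variant names.
fn feature_from_key(key: &str) -> Option<Feature> {
    match key {
        "plan_tool" => Some(Feature::PlanTool),
        "apply_patch_freeform" => Some(Feature::ApplyPatchFreeform),
        "view_image_tool" => Some(Feature::ViewImageTool),
        "web_search_request" => Some(Feature::WebSearchRequest),
        "streamable_shell" => Some(Feature::StreamableShell),
        "unified_exec" => Some(Feature::UnifiedExec),
        "rmcp_client" => Some(Feature::RmcpClient),
        _ => None,
    }
}
```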

Follow-up:
In a follow-up PR, the goal is to have default `bundles` of features that can
be associated with a model (one possible shape is sketched below).
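Purely illustrative and not part of this PR: a hypothetical `FeatureBundle` type built on the `Feature` enum this PR introduces, to show what such a bundle could look like.

```rust
// Hypothetical sketch only; nothing below exists in this PR. A bundle could be
// a named set of feature defaults that a model (family) selects, applied before
// config.toml and CLI overrides.
pub struct FeatureBundle {
    pub name: &'static str,
    pub defaults: &'static [(Feature, bool)],
}

pub const EXPERIMENTAL_BUNDLE: FeatureBundle = FeatureBundle {
    name: "experimental",
    defaults: &[
        (Feature::UnifiedExec, true),
        (Feature::ApplyPatchFreeform, true),
    ],
};
```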
Author: jif-oai
Date: 2025-10-14 18:50:00 +01:00
Committed by: GitHub
Parent: d6c5df9a0a
Commit: f7b4e29609
16 changed files with 762 additions and 176 deletions


@@ -17,6 +17,10 @@ use crate::config_types::ShellEnvironmentPolicy;
use crate::config_types::ShellEnvironmentPolicyToml;
use crate::config_types::Tui;
use crate::config_types::UriBasedFileOpener;
use crate::features::Feature;
use crate::features::FeatureOverrides;
use crate::features::Features;
use crate::features::FeaturesToml;
use crate::git_info::resolve_root_git_project_for_trust;
use crate::model_family::ModelFamily;
use crate::model_family::derive_default_model_family;
@@ -218,6 +222,9 @@ pub struct Config {
/// Include the `view_image` tool that lets the agent attach a local image path to context.
pub include_view_image_tool: bool,
/// Centralized feature flags; source of truth for feature gating.
pub features: Features,
/// The active profile name used to derive this `Config` (if any).
pub active_profile: Option<String>,
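The `crate::features` module itself is not part of this file's diff. Reconstructed from the call sites here (`Features::with_defaults()`, `Features::from_config(...)`, `features.enabled(Feature::…)`, `FeatureOverrides { … }`, `FeaturesToml { entries }`), its surface looks roughly like the sketch below; derives, internals, and defaults are assumptions, not the real implementation.

```rust
// Sketch of the `crate::features` surface, inferred from this diff's call
// sites. Internals and defaults are assumptions; the real module is not shown.
use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum Feature {
    PlanTool,
    ApplyPatchFreeform,
    ViewImageTool,
    WebSearchRequest,
    StreamableShell,
    UnifiedExec,
    RmcpClient,
}

/// Raw `[features]` table from config.toml: feature key -> enabled.
#[derive(Debug, Default)]
pub struct FeaturesToml {
    pub entries: BTreeMap<String, bool>,
}

/// CLI/programmatic overrides that take precedence over config values.
#[derive(Debug, Default)]
pub struct FeatureOverrides {
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub web_search_request: Option<bool>,
}

#[derive(Debug)]
pub struct Features {
    state: BTreeMap<Feature, bool>, // hypothetical internal representation
}

impl Features {
    /// Built-in defaults, mirroring the old `unwrap_or(..)` fallbacks replaced
    /// later in this diff: only the view_image tool is on by default (assumed).
    pub fn with_defaults() -> Self {
        let state = BTreeMap::from([
            (Feature::PlanTool, false),
            (Feature::ApplyPatchFreeform, false),
            (Feature::ViewImageTool, true),
            (Feature::WebSearchRequest, false),
            (Feature::StreamableShell, false),
            (Feature::UnifiedExec, false),
            (Feature::RmcpClient, false),
        ]);
        Features { state }
    }

    // Features::from_config(&ConfigToml, &ConfigProfile, FeatureOverrides) merges
    // defaults, legacy toggles, the [features] table, and overrides; omitted here
    // because it depends on types defined elsewhere in the crate.

    /// Query the merged flag state for a single feature.
    pub fn enabled(&self, feature: Feature) -> bool {
        self.state.get(&feature).copied().unwrap_or(false)
    }
}
```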
@@ -794,19 +801,15 @@ pub struct ConfigToml {
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: Option<String>,
/// Experimental path to a file whose contents replace the built-in BASE_INSTRUCTIONS.
pub experimental_instructions_file: Option<PathBuf>,
pub experimental_use_exec_command_tool: Option<bool>,
pub experimental_use_unified_exec_tool: Option<bool>,
pub experimental_use_rmcp_client: Option<bool>,
pub experimental_use_freeform_apply_patch: Option<bool>,
pub projects: Option<HashMap<String, ProjectConfig>>,
/// Nested tools section for feature toggles
pub tools: Option<ToolsToml>,
/// Centralized feature flags (new). Prefer this over individual toggles.
#[serde(default)]
pub features: Option<FeaturesToml>,
/// When true, disables burst-paste detection for typed input entirely.
/// All characters are inserted as they are received, and no buffering
/// or placeholder replacement will occur for fast keypress bursts.
@@ -817,6 +820,13 @@ pub struct ConfigToml {
/// Tracks whether the Windows onboarding screen has been acknowledged.
pub windows_wsl_setup_acknowledged: Option<bool>,
/// Legacy, now use features
pub experimental_instructions_file: Option<PathBuf>,
pub experimental_use_exec_command_tool: Option<bool>,
pub experimental_use_unified_exec_tool: Option<bool>,
pub experimental_use_rmcp_client: Option<bool>,
pub experimental_use_freeform_apply_patch: Option<bool>,
}
impl From<ConfigToml> for UserSavedConfig {
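`Features::from_config` is not shown in this diff, but the legacy fields kept above and the tests added below suggest a layered merge where later sources win. A hedged sketch of that ordering (the helper name and the exact precedence are assumptions):

```rust
// Assumed layering inside Features::from_config; the real ordering is not
// shown in this diff. Each later layer overwrites the earlier ones.
use std::collections::BTreeMap;

fn set_if_some(state: &mut BTreeMap<Feature, bool>, feature: Feature, value: Option<bool>) {
    if let Some(v) = value {
        state.insert(feature, v);
    }
}

// 1. Start from Features::with_defaults().
// 2. Apply the legacy ConfigToml toggles, e.g.
//    set_if_some(&mut state, Feature::UnifiedExec, cfg.experimental_use_unified_exec_tool);
// 3. Apply the [features] table (string keys such as "plan_tool").
// 4. Apply profile-level toggles, then the CLI FeatureOverrides last.
```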
@@ -980,9 +990,9 @@ impl Config {
config_profile: config_profile_key,
codex_linux_sandbox_exe,
base_instructions,
include_plan_tool,
include_apply_patch_tool,
include_view_image_tool,
include_plan_tool: include_plan_tool_override,
include_apply_patch_tool: include_apply_patch_tool_override,
include_view_image_tool: include_view_image_tool_override,
show_raw_agent_reasoning,
tools_web_search_request: override_tools_web_search_request,
} = overrides;
@@ -1005,6 +1015,15 @@ impl Config {
None => ConfigProfile::default(),
};
let feature_overrides = FeatureOverrides {
include_plan_tool: include_plan_tool_override,
include_apply_patch_tool: include_apply_patch_tool_override,
include_view_image_tool: include_view_image_tool_override,
web_search_request: override_tools_web_search_request,
};
let features = Features::from_config(&cfg, &config_profile, feature_overrides);
let sandbox_policy = cfg.derive_sandbox_policy(sandbox_mode);
let mut model_providers = built_in_model_providers();
@@ -1050,13 +1069,13 @@ impl Config {
let history = cfg.history.unwrap_or_default();
let tools_web_search_request = override_tools_web_search_request
.or(cfg.tools.as_ref().and_then(|t| t.web_search))
.unwrap_or(false);
let include_view_image_tool = include_view_image_tool
.or(cfg.tools.as_ref().and_then(|t| t.view_image))
.unwrap_or(true);
let include_plan_tool_flag = features.enabled(Feature::PlanTool);
let include_apply_patch_tool_flag = features.enabled(Feature::ApplyPatchFreeform);
let include_view_image_tool_flag = features.enabled(Feature::ViewImageTool);
let tools_web_search_request = features.enabled(Feature::WebSearchRequest);
let use_experimental_streamable_shell_tool = features.enabled(Feature::StreamableShell);
let use_experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
let use_experimental_use_rmcp_client = features.enabled(Feature::RmcpClient);
let model = model
.or(config_profile.model)
@@ -1164,19 +1183,14 @@ impl Config {
.chatgpt_base_url
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
include_plan_tool: include_plan_tool.unwrap_or(false),
include_apply_patch_tool: include_apply_patch_tool
.or(cfg.experimental_use_freeform_apply_patch)
.unwrap_or(false),
include_plan_tool: include_plan_tool_flag,
include_apply_patch_tool: include_apply_patch_tool_flag,
tools_web_search_request,
use_experimental_streamable_shell_tool: cfg
.experimental_use_exec_command_tool
.unwrap_or(false),
use_experimental_unified_exec_tool: cfg
.experimental_use_unified_exec_tool
.unwrap_or(false),
use_experimental_use_rmcp_client: cfg.experimental_use_rmcp_client.unwrap_or(false),
include_view_image_tool,
use_experimental_streamable_shell_tool,
use_experimental_unified_exec_tool,
use_experimental_use_rmcp_client,
include_view_image_tool: include_view_image_tool_flag,
features,
active_profile: active_profile_name,
windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
@@ -1309,6 +1323,7 @@ pub fn log_dir(cfg: &Config) -> std::io::Result<PathBuf> {
mod tests {
use crate::config_types::HistoryPersistence;
use crate::config_types::Notifications;
use crate::features::Feature;
use super::*;
use pretty_assertions::assert_eq;
@@ -1436,6 +1451,93 @@ exclude_slash_tmp = true
Ok(())
}
#[test]
fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let mut profiles = HashMap::new();
profiles.insert(
"work".to_string(),
ConfigProfile {
include_plan_tool: Some(true),
include_view_image_tool: Some(false),
..Default::default()
},
);
let cfg = ConfigToml {
profiles,
profile: Some("work".to_string()),
..Default::default()
};
let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;
assert!(config.features.enabled(Feature::PlanTool));
assert!(!config.features.enabled(Feature::ViewImageTool));
assert!(config.include_plan_tool);
assert!(!config.include_view_image_tool);
Ok(())
}
#[test]
fn feature_table_overrides_legacy_flags() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let mut entries = BTreeMap::new();
entries.insert("plan_tool".to_string(), false);
entries.insert("apply_patch_freeform".to_string(), false);
let cfg = ConfigToml {
features: Some(crate::features::FeaturesToml { entries }),
..Default::default()
};
let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;
assert!(!config.features.enabled(Feature::PlanTool));
assert!(!config.features.enabled(Feature::ApplyPatchFreeform));
assert!(!config.include_plan_tool);
assert!(!config.include_apply_patch_tool);
Ok(())
}
#[test]
fn legacy_toggles_map_to_features() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let cfg = ConfigToml {
experimental_use_exec_command_tool: Some(true),
experimental_use_unified_exec_tool: Some(true),
experimental_use_rmcp_client: Some(true),
experimental_use_freeform_apply_patch: Some(true),
..Default::default()
};
let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;
assert!(config.features.enabled(Feature::ApplyPatchFreeform));
assert!(config.features.enabled(Feature::StreamableShell));
assert!(config.features.enabled(Feature::UnifiedExec));
assert!(config.features.enabled(Feature::RmcpClient));
assert!(config.include_apply_patch_tool);
assert!(config.use_experimental_streamable_shell_tool);
assert!(config.use_experimental_unified_exec_tool);
assert!(config.use_experimental_use_rmcp_client);
Ok(())
}
#[test]
fn config_honors_explicit_file_oauth_store_mode() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
@@ -2120,6 +2222,7 @@ model_verbosity = "high"
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
features: Features::with_defaults(),
active_profile: Some("o3".to_string()),
windows_wsl_setup_acknowledged: false,
disable_paste_burst: false,
@@ -2183,6 +2286,7 @@ model_verbosity = "high"
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
features: Features::with_defaults(),
active_profile: Some("gpt3".to_string()),
windows_wsl_setup_acknowledged: false,
disable_paste_burst: false,
@@ -2261,6 +2365,7 @@ model_verbosity = "high"
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
features: Features::with_defaults(),
active_profile: Some("zdr".to_string()),
windows_wsl_setup_acknowledged: false,
disable_paste_burst: false,
@@ -2325,6 +2430,7 @@ model_verbosity = "high"
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
features: Features::with_defaults(),
active_profile: Some("gpt5".to_string()),
windows_wsl_setup_acknowledged: false,
disable_paste_burst: false,