This commit is contained in:
Thibault Sottiaux
2025-09-12 22:44:05 -07:00
parent 3d4acbaea0
commit 19b4ed3c96
4 changed files with 34 additions and 16 deletions

View File

@@ -50,10 +50,24 @@ pub fn builtin_model_presets() -> &'static [ModelPreset] {
effort: Some(ReasoningEffort::High),
},
ModelPreset {
id: "gpt-5-high-new",
label: "gpt-5 high new",
description: "our latest release tuned to rely on the model's built-in reasoning defaults",
model: "gpt-5-high-new",
id: "swiftfox-low",
label: "swiftfox low",
description: "balances speed with some reasoning; useful for straightforward queries and short explanations",
model: "swiftfox-low",
effort: None,
},
ModelPreset {
id: "swiftfox-medium",
label: "swiftfox medium",
description: "default setting; provides a solid balance of reasoning depth and latency for general-purpose tasks",
model: "swiftfox-medium",
effort: None,
},
ModelPreset {
id: "swiftfox-high",
label: "swiftfox high",
description: "maximizes reasoning depth for complex or ambiguous problems",
model: "swiftfox-high",
effort: None,
},
];

View File

@@ -33,7 +33,7 @@ use toml_edit::DocumentMut;
const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5";
pub const GPT5_HIGH_MODEL: &str = "gpt-5-high-new";
pub const SWIFTFOX_MEDIUM_MODEL: &str = "swiftfox-medium";
/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
@@ -1184,7 +1184,7 @@ exclude_slash_tmp = true
persist_model_selection(
codex_home.path(),
None,
"gpt-5-high-new",
"swiftfox-high",
Some(ReasoningEffort::High),
)
.await?;
@@ -1193,7 +1193,7 @@ exclude_slash_tmp = true
tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
assert_eq!(parsed.model.as_deref(), Some("gpt-5-high-new"));
assert_eq!(parsed.model.as_deref(), Some("swiftfox-high"));
assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));
Ok(())
@@ -1247,8 +1247,8 @@ model = "gpt-4.1"
persist_model_selection(
codex_home.path(),
Some("dev"),
"gpt-5-high-new",
Some(ReasoningEffort::Low),
"swiftfox-medium",
Some(ReasoningEffort::Medium),
)
.await?;
@@ -1260,8 +1260,11 @@ model = "gpt-4.1"
.get("dev")
.expect("profile should be created");
assert_eq!(profile.model.as_deref(), Some("gpt-5-high-new"));
assert_eq!(profile.model_reasoning_effort, Some(ReasoningEffort::Low));
assert_eq!(profile.model.as_deref(), Some("swiftfox-medium"));
assert_eq!(
profile.model_reasoning_effort,
Some(ReasoningEffort::Medium)
);
Ok(())
}

View File

@@ -11,7 +11,7 @@ use codex_core::RolloutRecorder;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::GPT5_HIGH_MODEL;
use codex_core::config::SWIFTFOX_MEDIUM_MODEL;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config::persist_model_selection;
@@ -380,7 +380,7 @@ async fn run_ratatui_app(
let switch_to_new_model = upgrade_decision == ModelUpgradeDecision::Switch;
if switch_to_new_model {
config.model = GPT5_HIGH_MODEL.to_owned();
config.model = SWIFTFOX_MEDIUM_MODEL.to_owned();
config.model_reasoning_effort = None;
if let Err(e) = persist_model_selection(
&config.codex_home,

View File

@@ -1,7 +1,7 @@
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use codex_core::config::GPT5_HIGH_MODEL;
use codex_core::config::SWIFTFOX_MEDIUM_MODEL;
use color_eyre::eyre::Result;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -83,7 +83,8 @@ impl WidgetRef for &ModelUpgradePopup {
let mut lines: Vec<Line> = vec![
String::new().into(),
format!(" Codex is now powered by {GPT5_HIGH_MODEL}, a new model that is").into(),
format!(" Codex is now powered by {SWIFTFOX_MEDIUM_MODEL}, a new model that is")
.into(),
Line::from(vec![
" ".into(),
"faster, a better collaborator, ".bold(),
@@ -108,7 +109,7 @@ impl WidgetRef for &ModelUpgradePopup {
lines.push(create_option(
0,
ModelUpgradeOption::TryNewModel,
&format!("Yes, switch me to {GPT5_HIGH_MODEL}"),
&format!("Yes, switch me to {SWIFTFOX_MEDIUM_MODEL}"),
));
lines.push(create_option(
1,