chore: rename (#3648)

This commit is contained in:
Michael Bolin
2025-09-15 08:17:13 -07:00
committed by GitHub
parent d60cbed691
commit f037b2fd56
8 changed files with 34 additions and 34 deletions

View File

@@ -1,4 +1,4 @@
use codex_core::config::SWIFTFOX_MEDIUM_MODEL;
use codex_core::config::GPT_5_CODEX_MEDIUM_MODEL;
use codex_core::protocol_config_types::ReasoningEffort;
use codex_protocol::mcp_protocol::AuthMode;
@@ -19,24 +19,24 @@ pub struct ModelPreset {
const PRESETS: &[ModelPreset] = &[
ModelPreset {
id: "swiftfox-low",
label: "swiftfox low",
id: "gpt-5-codex-low",
label: "gpt-5-codex low",
description: "",
model: "swiftfox",
model: "gpt-5-codex",
effort: Some(ReasoningEffort::Low),
},
ModelPreset {
id: "swiftfox-medium",
label: "swiftfox medium",
id: "gpt-5-codex-medium",
label: "gpt-5-codex medium",
description: "",
model: "swiftfox",
model: "gpt-5-codex",
effort: None,
},
ModelPreset {
id: "swiftfox-high",
label: "swiftfox high",
id: "gpt-5-codex-high",
label: "gpt-5-codex high",
description: "",
model: "swiftfox",
model: "gpt-5-codex",
effort: Some(ReasoningEffort::High),
},
ModelPreset {
@@ -74,7 +74,7 @@ pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
Some(AuthMode::ApiKey) => PRESETS
.iter()
.copied()
.filter(|p| p.model != SWIFTFOX_MEDIUM_MODEL)
.filter(|p| p.model != GPT_5_CODEX_MEDIUM_MODEL)
.collect(),
_ => PRESETS.to_vec(),
}

View File

@@ -1,4 +1,4 @@
You are Swiftfox. You are running as a coding agent in the Codex CLI on a user's computer.
You are Codex, based on GPT-5. You are running as a coding agent in the Codex CLI on a user's computer.
## General
@@ -26,7 +26,7 @@ When using the planning tool:
## Codex CLI harness, sandboxing, and approvals
The Codex CLI harness supports several different sandboxing and approval configurations that the user can choose from.
The Codex CLI harness supports several different sandboxing and approval configurations that the user can choose from.
Filesystem sandboxing defines which files can be read or written. The options are:
- **read-only**: You can only read files.

View File

@@ -215,7 +215,7 @@ mod tests {
expects_apply_patch_instructions: false,
},
InstructionsTestCase {
slug: "swiftfox",
slug: "gpt-5-codex",
expects_apply_patch_instructions: false,
},
];

View File

@@ -38,8 +38,8 @@ use toml_edit::Table as TomlTable;
const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5";
pub const SWIFTFOX_MEDIUM_MODEL: &str = "swiftfox";
pub const SWIFTFOX_MODEL_DISPLAY_NAME: &str = "swiftfox-medium";
pub const GPT_5_CODEX_MEDIUM_MODEL: &str = "gpt-5-codex";
pub const GPT_5_CODEX_DISPLAY_NAME: &str = "gpt-5-codex";
/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
@@ -1296,7 +1296,7 @@ exclude_slash_tmp = true
persist_model_selection(
codex_home.path(),
None,
"swiftfox",
"gpt-5-codex",
Some(ReasoningEffort::High),
)
.await?;
@@ -1305,7 +1305,7 @@ exclude_slash_tmp = true
tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
let parsed: ConfigToml = toml::from_str(&serialized)?;
assert_eq!(parsed.model.as_deref(), Some("swiftfox"));
assert_eq!(parsed.model.as_deref(), Some("gpt-5-codex"));
assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));
Ok(())
@@ -1359,7 +1359,7 @@ model = "gpt-4.1"
persist_model_selection(
codex_home.path(),
Some("dev"),
"swiftfox",
"gpt-5-codex",
Some(ReasoningEffort::Medium),
)
.await?;
@@ -1372,7 +1372,7 @@ model = "gpt-4.1"
.get("dev")
.expect("profile should be created");
assert_eq!(profile.model.as_deref(), Some("swiftfox"));
assert_eq!(profile.model.as_deref(), Some("gpt-5-codex"));
assert_eq!(
profile.model_reasoning_effort,
Some(ReasoningEffort::Medium)

View File

@@ -11,8 +11,8 @@ pub(crate) const INTERNAL_STORAGE_FILE: &str = "internal_storage.json";
pub struct InternalStorage {
#[serde(skip)]
storage_path: PathBuf,
#[serde(default, alias = "gpt_5_high_model_prompt_seen")]
pub swiftfox_model_prompt_seen: bool,
#[serde(default)]
pub gpt_5_codex_model_prompt_seen: bool,
}
// TODO(jif) generalise all the file writers and build proper async channel inserters.

View File

@@ -4,7 +4,7 @@ use crate::tool_apply_patch::ApplyPatchToolType;
/// The `instructions` field in the payload sent to a model should always start
/// with this content.
const BASE_INSTRUCTIONS: &str = include_str!("../prompt.md");
const SWIFTFOX_INSTRUCTIONS: &str = include_str!("../swiftfox_prompt.md");
const GPT_5_CODEX_INSTRUCTIONS: &str = include_str!("../gpt_5_codex_prompt.md");
/// A model family is a group of models that share certain characteristics.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -99,12 +99,12 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
model_family!(slug, "gpt-4o", needs_special_apply_patch_instructions: true)
} else if slug.starts_with("gpt-3.5") {
model_family!(slug, "gpt-3.5", needs_special_apply_patch_instructions: true)
} else if slug.starts_with("codex-") || slug.starts_with("swiftfox") {
} else if slug.starts_with("codex-") || slug.starts_with("gpt-5-codex") {
model_family!(
slug, slug,
supports_reasoning_summaries: true,
reasoning_summary_format: ReasoningSummaryFormat::Experimental,
base_instructions: SWIFTFOX_INSTRUCTIONS.to_string(),
base_instructions: GPT_5_CODEX_INSTRUCTIONS.to_string(),
)
} else if slug.starts_with("gpt-5") {
model_family!(

View File

@@ -11,7 +11,7 @@ use codex_core::RolloutRecorder;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::SWIFTFOX_MEDIUM_MODEL;
use codex_core::config::GPT_5_CODEX_MEDIUM_MODEL;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config::persist_model_selection;
@@ -381,9 +381,9 @@ async fn run_ratatui_app(
&cli,
&config,
active_profile.as_deref(),
internal_storage.swiftfox_model_prompt_seen,
internal_storage.gpt_5_codex_model_prompt_seen,
) {
internal_storage.swiftfox_model_prompt_seen = true;
internal_storage.gpt_5_codex_model_prompt_seen = true;
if let Err(e) = internal_storage.persist().await {
error!("Failed to persist internal storage: {e:?}");
}
@@ -392,7 +392,7 @@ async fn run_ratatui_app(
let switch_to_new_model = upgrade_decision == ModelUpgradeDecision::Switch;
if switch_to_new_model {
config.model = SWIFTFOX_MEDIUM_MODEL.to_owned();
config.model = GPT_5_CODEX_MEDIUM_MODEL.to_owned();
config.model_reasoning_effort = None;
if let Err(e) = persist_model_selection(
&config.codex_home,
@@ -525,13 +525,13 @@ fn should_show_model_rollout_prompt(
cli: &Cli,
config: &Config,
active_profile: Option<&str>,
swiftfox_model_prompt_seen: bool,
gpt_5_codex_model_prompt_seen: bool,
) -> bool {
let login_status = get_login_status(config);
active_profile.is_none()
&& cli.model.is_none()
&& !swiftfox_model_prompt_seen
&& !gpt_5_codex_model_prompt_seen
&& config.model_provider.requires_openai_auth
&& matches!(login_status, LoginStatus::AuthMode(AuthMode::ChatGPT))
&& !cli.oss

View File

@@ -3,7 +3,7 @@ use crate::frames::FRAME_TICK_DEFAULT;
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use codex_core::config::SWIFTFOX_MODEL_DISPLAY_NAME;
use codex_core::config::GPT_5_CODEX_DISPLAY_NAME;
use color_eyre::eyre::Result;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -130,7 +130,7 @@ impl WidgetRef for &ModelUpgradePopup {
lines.push("".into());
lines.push(
format!(" Codex is now powered by {SWIFTFOX_MODEL_DISPLAY_NAME}, a new model that is")
format!(" Codex is now powered by {GPT_5_CODEX_DISPLAY_NAME}, a new model that is")
.into(),
);
lines.push(Line::from(vec![
@@ -156,7 +156,7 @@ impl WidgetRef for &ModelUpgradePopup {
lines.push(create_option(
0,
ModelUpgradeOption::TryNewModel,
&format!("Yes, switch me to {SWIFTFOX_MODEL_DISPLAY_NAME}"),
&format!("Yes, switch me to {GPT_5_CODEX_DISPLAY_NAME}"),
));
lines.push("".into());
lines.push(create_option(