@@ -49,6 +49,13 @@ pub fn builtin_model_presets() -> &'static [ModelPreset] {
            model: "gpt-5",
            effort: ReasoningEffort::High,
        },
        ModelPreset {
            id: "gpt-5-high-new",
            label: "gpt-5 high new",
            description: "— our latest release tuned to rely on the model's built-in reasoning defaults",
            model: "gpt-5-high-new",
            effort: ReasoningEffort::Medium,
        },
    ];
    PRESETS
}
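
The initializer above touches five fields; for orientation, a sketch of the preset shape it implies (the real `ModelPreset` definition lives elsewhere in this crate and may carry more fields):

```rust
// Sketch of the shape implied by the initializer above; the actual struct
// is defined elsewhere and may carry additional fields.
pub struct ModelPreset {
    pub id: &'static str,
    pub label: &'static str,
    pub description: &'static str,
    pub model: &'static str,
    pub effort: ReasoningEffort,
}
```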

@@ -15,6 +15,7 @@ use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use anyhow::Context;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -31,6 +32,7 @@ use toml::Value as TomlValue;
use toml_edit::DocumentMut;

const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
pub const GPT5_HIGH_MODEL: &str = "gpt-5-high";

/// Maximum number of bytes of the documentation that will be embedded. Larger
/// files are *silently truncated* to this size so we do not take up too much of
@@ -128,9 +130,6 @@ pub struct Config {
    /// output will be hyperlinked using the specified URI scheme.
    pub file_opener: UriBasedFileOpener,

    /// Collection of settings that are specific to the TUI.
    pub tui: Tui,

    /// Path to the `codex-linux-sandbox` executable. This must be set if
    /// [`crate::exec::SandboxType::LinuxSeccomp`] is used. Note that this
    /// cannot be set in the config file: it must be set in code via
@@ -351,6 +350,107 @@ pub fn set_project_trusted(codex_home: &Path, project_path: &Path) -> anyhow::Re
    Ok(())
}

fn ensure_profile_table<'a>(
    doc: &'a mut DocumentMut,
    profile_name: &str,
) -> anyhow::Result<&'a mut toml_edit::Table> {
    let mut created_profiles_table = false;
    {
        let root = doc.as_table_mut();
        let needs_table = !root.contains_key("profiles")
            || root
                .get("profiles")
                .and_then(|item| item.as_table())
                .is_none();
        if needs_table {
            root.insert("profiles", toml_edit::table());
            created_profiles_table = true;
        }
    }

    let Some(profiles_table) = doc["profiles"].as_table_mut() else {
        return Err(anyhow::anyhow!(
            "profiles table missing after initialization"
        ));
    };

    if created_profiles_table {
        profiles_table.set_implicit(true);
    }

    let needs_profile_table = !profiles_table.contains_key(profile_name)
        || profiles_table
            .get(profile_name)
            .and_then(|item| item.as_table())
            .is_none();
    if needs_profile_table {
        profiles_table.insert(profile_name, toml_edit::table());
    }

    let Some(profile_table) = profiles_table
        .get_mut(profile_name)
        .and_then(|item| item.as_table_mut())
    else {
        return Err(anyhow::anyhow!(format!(
            "profile table missing for {profile_name}"
        )));
    };

    profile_table.set_implicit(false);
    Ok(profile_table)
}
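
The `set_implicit` pair is what keeps the emitted TOML tidy: an implicit parent table drops its bare `[profiles]` header, while the explicit leaf forces `[profiles.<name>]` to render even when empty. A standalone toml_edit sketch of that behavior:

```rust
use toml_edit::DocumentMut;

fn main() {
    let mut doc = DocumentMut::new();
    doc.as_table_mut().insert("profiles", toml_edit::table());

    let profiles = doc["profiles"].as_table_mut().expect("just inserted");
    // Implicit: no bare `[profiles]` header is emitted on its own.
    profiles.set_implicit(true);

    profiles.insert("dev", toml_edit::table());
    let dev = profiles["dev"].as_table_mut().expect("just inserted");
    // Explicit: `[profiles.dev]` renders even while the table is empty.
    dev.set_implicit(false);

    // Prints only `[profiles.dev]`, no standalone `[profiles]` header.
    print!("{doc}");
}
```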

// TODO(jif) refactor config persistence.
pub async fn persist_model_selection(
    codex_home: &Path,
    active_profile: Option<&str>,
    model: &str,
    effort: Option<ReasoningEffort>,
) -> anyhow::Result<()> {
    let config_path = codex_home.join(CONFIG_TOML_FILE);
    let serialized = match tokio::fs::read_to_string(&config_path).await {
        Ok(contents) => contents,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => String::new(),
        Err(err) => return Err(err.into()),
    };

    let mut doc = if serialized.is_empty() {
        DocumentMut::new()
    } else {
        serialized.parse::<DocumentMut>()?
    };

    if let Some(profile_name) = active_profile {
        let profile_table = ensure_profile_table(&mut doc, profile_name)?;
        profile_table["model"] = toml_edit::value(model);
        if let Some(effort) = effort {
            profile_table["model_reasoning_effort"] = toml_edit::value(effort.to_string());
        }
    } else {
        let table = doc.as_table_mut();
        table["model"] = toml_edit::value(model);
        if let Some(effort) = effort {
            table["model_reasoning_effort"] = toml_edit::value(effort.to_string());
        }
    }

    // TODO(jif) refactor the home creation
    tokio::fs::create_dir_all(codex_home)
        .await
        .with_context(|| {
            format!(
                "failed to create Codex home directory at {}",
                codex_home.display()
            )
        })?;

    tokio::fs::write(&config_path, doc.to_string())
        .await
        .with_context(|| format!("failed to persist config.toml at {}", config_path.display()))?;

    Ok(())
}
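
Depending on `active_profile`, the same call targets either the document root or a `[profiles.<name>]` table. A minimal usage sketch (the `demo` wrapper is illustrative, not part of this change):

```rust
// Sketch only: the two scopes persist_model_selection can write.
async fn demo(codex_home: &std::path::Path) -> anyhow::Result<()> {
    // Global scope: writes top-level `model` and `model_reasoning_effort`
    // keys into config.toml.
    persist_model_selection(codex_home, None, "gpt-5-high", Some(ReasoningEffort::High)).await?;

    // Profile scope: the same keys land under `[profiles.dev]`, which is
    // created on demand by ensure_profile_table.
    persist_model_selection(codex_home, Some("dev"), "gpt-5-high", Some(ReasoningEffort::High))
        .await?;
    Ok(())
}
```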

/// Apply a single dotted-path override onto a TOML value.
fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
    use toml::value::Table;
@@ -804,7 +904,6 @@ impl Config {
            codex_home,
            history,
            file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode),
            tui: cfg.tui.unwrap_or_default(),
            codex_linux_sandbox_exe,

            hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false),
@@ -948,6 +1047,7 @@ mod tests {

    use super::*;
    use pretty_assertions::assert_eq;

    use tempfile::TempDir;

    #[test]
@@ -1038,6 +1138,145 @@ exclude_slash_tmp = true
        );
    }

    #[tokio::test]
    async fn persist_model_selection_updates_defaults() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;

        persist_model_selection(
            codex_home.path(),
            None,
            "gpt-5-high-new",
            Some(ReasoningEffort::High),
        )
        .await?;

        let serialized =
            tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
        let parsed: ConfigToml = toml::from_str(&serialized)?;

        assert_eq!(parsed.model.as_deref(), Some("gpt-5-high-new"));
        assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));

        Ok(())
    }

    #[tokio::test]
    async fn persist_model_selection_overwrites_existing_model() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
        let config_path = codex_home.path().join(CONFIG_TOML_FILE);

        tokio::fs::write(
            &config_path,
            r#"
model = "gpt-5"
model_reasoning_effort = "medium"

[profiles.dev]
model = "gpt-4.1"
"#,
        )
        .await?;

        persist_model_selection(
            codex_home.path(),
            None,
            "o4-mini",
            Some(ReasoningEffort::High),
        )
        .await?;

        let serialized = tokio::fs::read_to_string(config_path).await?;
        let parsed: ConfigToml = toml::from_str(&serialized)?;

        assert_eq!(parsed.model.as_deref(), Some("o4-mini"));
        assert_eq!(parsed.model_reasoning_effort, Some(ReasoningEffort::High));
        assert_eq!(
            parsed
                .profiles
                .get("dev")
                .and_then(|profile| profile.model.as_deref()),
            Some("gpt-4.1"),
        );

        Ok(())
    }

    #[tokio::test]
    async fn persist_model_selection_updates_profile() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;

        persist_model_selection(
            codex_home.path(),
            Some("dev"),
            "gpt-5-high-new",
            Some(ReasoningEffort::Low),
        )
        .await?;

        let serialized =
            tokio::fs::read_to_string(codex_home.path().join(CONFIG_TOML_FILE)).await?;
        let parsed: ConfigToml = toml::from_str(&serialized)?;
        let profile = parsed
            .profiles
            .get("dev")
            .expect("profile should be created");

        assert_eq!(profile.model.as_deref(), Some("gpt-5-high-new"));
        assert_eq!(profile.model_reasoning_effort, Some(ReasoningEffort::Low));

        Ok(())
    }

    #[tokio::test]
    async fn persist_model_selection_updates_existing_profile() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
        let config_path = codex_home.path().join(CONFIG_TOML_FILE);

        tokio::fs::write(
            &config_path,
            r#"
[profiles.dev]
model = "gpt-4"
model_reasoning_effort = "medium"

[profiles.prod]
model = "gpt-5"
"#,
        )
        .await?;

        persist_model_selection(
            codex_home.path(),
            Some("dev"),
            "o4-high",
            Some(ReasoningEffort::Medium),
        )
        .await?;

        let serialized = tokio::fs::read_to_string(config_path).await?;
        let parsed: ConfigToml = toml::from_str(&serialized)?;

        let dev_profile = parsed
            .profiles
            .get("dev")
            .expect("dev profile should survive updates");
        assert_eq!(dev_profile.model.as_deref(), Some("o4-high"));
        assert_eq!(
            dev_profile.model_reasoning_effort,
            Some(ReasoningEffort::Medium)
        );

        assert_eq!(
            parsed
                .profiles
                .get("prod")
                .and_then(|profile| profile.model.as_deref()),
            Some("gpt-5"),
        );

        Ok(())
    }

    struct PrecedenceTestFixture {
        cwd: TempDir,
        codex_home: TempDir,
@@ -1196,7 +1435,6 @@ model_verbosity = "high"
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            show_raw_agent_reasoning: false,
@@ -1253,7 +1491,6 @@ model_verbosity = "high"
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            show_raw_agent_reasoning: false,
@@ -1325,7 +1562,6 @@ model_verbosity = "high"
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            show_raw_agent_reasoning: false,
@@ -1383,7 +1619,6 @@ model_verbosity = "high"
            codex_home: fixture.codex_home(),
            history: History::default(),
            file_opener: UriBasedFileOpener::VsCode,
            tui: Tui::default(),
            codex_linux_sandbox_exe: None,
            hide_agent_reasoning: false,
            show_raw_agent_reasoning: false,

codex-rs/core/src/internal_storage.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
use anyhow::Context;
use serde::Deserialize;
use serde::Serialize;
use std::path::Path;
use std::path::PathBuf;

pub(crate) const INTERNAL_STORAGE_FILE: &str = "internal_storage.json";

#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct InternalStorage {
    #[serde(skip)]
    storage_path: PathBuf,
    #[serde(default)]
    pub gpt_5_high_model_prompt_seen: bool,
}

// TODO(jif) generalise all the file writers and build proper async channel inserters.
impl InternalStorage {
    pub fn load(codex_home: &Path) -> Self {
        let storage_path = codex_home.join(INTERNAL_STORAGE_FILE);

        match std::fs::read_to_string(&storage_path) {
            Ok(serialized) => match serde_json::from_str::<Self>(&serialized) {
                Ok(mut storage) => {
                    storage.storage_path = storage_path;
                    storage
                }
                Err(error) => {
                    tracing::warn!("failed to parse internal storage: {error:?}");
                    Self::empty(storage_path)
                }
            },
            Err(error) => {
                tracing::warn!("failed to read internal storage: {error:?}");
                Self::empty(storage_path)
            }
        }
    }

    fn empty(storage_path: PathBuf) -> Self {
        Self {
            storage_path,
            ..Default::default()
        }
    }

    pub async fn persist(&self) -> anyhow::Result<()> {
        let serialized = serde_json::to_string_pretty(self)?;

        if let Some(parent) = self.storage_path.parent() {
            tokio::fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create internal storage directory at {}",
                    parent.display()
                )
            })?;
        }

        tokio::fs::write(&self.storage_path, serialized)
            .await
            .with_context(|| {
                format!(
                    "failed to persist internal storage at {}",
                    self.storage_path.display()
                )
            })
    }
}
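
The intended lifecycle is load, flip the flag, persist, which is exactly how the TUI drives it later in this diff. A condensed sketch (the `mark_prompt_seen` helper is illustrative):

```rust
// Sketch only: a missing or corrupt file degrades to defaults on load,
// and persist() writes the struct back to internal_storage.json.
async fn mark_prompt_seen(codex_home: &std::path::Path) -> anyhow::Result<()> {
    let mut storage = InternalStorage::load(codex_home);
    if !storage.gpt_5_high_model_prompt_seen {
        storage.gpt_5_high_model_prompt_seen = true;
        storage.persist().await?;
    }
    Ok(())
}
```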

@@ -28,6 +28,7 @@ mod exec_command;
pub mod exec_env;
mod flags;
pub mod git_info;
pub mod internal_storage;
mod is_safe_command;
pub mod landlock;
mod mcp_connection_manager;
@@ -74,6 +75,7 @@ pub use rollout::list::ConversationsPage;
pub use rollout::list::Cursor;
mod user_notification;
pub mod util;

pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
pub use safety::get_platform_sandbox;
// Re-export the protocol types from the standalone `codex-protocol` crate so existing

@@ -11,6 +11,8 @@ use codex_ansi_escape::ansi_escape_line;
use codex_core::AuthManager;
use codex_core::ConversationManager;
use codex_core::config::Config;
use codex_core::config::persist_model_selection;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::TokenUsage;
use color_eyre::eyre::Result;
use color_eyre::eyre::WrapErr;
@@ -37,6 +39,9 @@ pub(crate) struct App {

    /// Config is stored here so we can recreate ChatWidgets as needed.
    pub(crate) config: Config,
    pub(crate) active_profile: Option<String>,
    model_saved_to_profile: bool,
    model_saved_to_global: bool,

    pub(crate) file_search: FileSearchManager,

@@ -61,6 +66,7 @@ impl App {
        tui: &mut tui::Tui,
        auth_manager: Arc<AuthManager>,
        config: Config,
        active_profile: Option<String>,
        initial_prompt: Option<String>,
        initial_images: Vec<PathBuf>,
        resume_selection: ResumeSelection,
@@ -119,6 +125,9 @@ impl App {
            app_event_tx,
            chat_widget,
            config,
            active_profile,
            model_saved_to_profile: false,
            model_saved_to_global: false,
            file_search,
            enhanced_keys_supported,
            transcript_lines: Vec::new(),
@@ -291,7 +300,14 @@ impl App {
                self.chat_widget.set_reasoning_effort(effort);
            }
            AppEvent::UpdateModel(model) => {
                self.chat_widget.set_model(model);
                self.chat_widget.set_model(model.clone());
                self.config.model = model.clone();
                if let Some(family) = find_family_for_model(&model) {
                    self.config.model_family = family;
                }
                self.model_saved_to_profile = false;
                self.model_saved_to_global = false;
                self.show_model_save_hint();
            }
            AppEvent::UpdateAskForApprovalPolicy(policy) => {
                self.chat_widget.set_approval_policy(policy);
@@ -307,6 +323,92 @@ impl App {
        self.chat_widget.token_usage()
    }

    fn show_model_save_hint(&mut self) {
        let model = self.config.model.clone();
        if self.active_profile.is_some() {
            self.chat_widget.add_info_message(format!(
                "Model switched to {model}. Press Ctrl+S to save it for this profile, then press Ctrl+S again to set it as your global default."
            ));
        } else {
            self.chat_widget.add_info_message(format!(
                "Model switched to {model}. Press Ctrl+S to save it as your global default."
            ));
        }
    }

    async fn persist_model_shortcut(&mut self) {
        enum SaveScope<'a> {
            Profile(&'a str),
            Global,
            AlreadySaved,
        }

        let scope = if let Some(profile) = self
            .active_profile
            .as_deref()
            .filter(|_| !self.model_saved_to_profile)
        {
            SaveScope::Profile(profile)
        } else if !self.model_saved_to_global {
            SaveScope::Global
        } else {
            SaveScope::AlreadySaved
        };

        let model = self.config.model.clone();
        let effort = self.config.model_reasoning_effort;
        let codex_home = self.config.codex_home.clone();

        match scope {
            SaveScope::Profile(profile) => {
                match persist_model_selection(&codex_home, Some(profile), &model, Some(effort))
                    .await
                {
                    Ok(()) => {
                        self.model_saved_to_profile = true;
                        self.chat_widget.add_info_message(format!(
                            "Saved model {model} ({effort}) for profile `{profile}`. Press Ctrl+S again to make this your global default."
                        ));
                    }
                    Err(err) => {
                        tracing::error!(
                            error = %err,
                            "failed to persist model selection via shortcut"
                        );
                        self.chat_widget.add_error_message(format!(
                            "Failed to save model preference for profile `{profile}`: {err}"
                        ));
                    }
                }
            }
            SaveScope::Global => {
                match persist_model_selection(&codex_home, None, &model, Some(effort)).await {
                    Ok(()) => {
                        self.model_saved_to_global = true;
                        self.chat_widget.add_info_message(format!(
                            "Saved model {model} ({effort}) as your global default."
                        ));
                    }
                    Err(err) => {
                        tracing::error!(
                            error = %err,
                            "failed to persist global model selection via shortcut"
                        );
                        self.chat_widget.add_error_message(format!(
                            "Failed to save global model preference: {err}"
                        ));
                    }
                }
            }
            SaveScope::AlreadySaved => {
                self.chat_widget.add_info_message(
                    "Model preference already saved globally; no further action needed."
                        .to_string(),
                );
            }
        }
    }

    async fn handle_key_event(&mut self, tui: &mut tui::Tui, key_event: KeyEvent) {
        match key_event {
            KeyEvent {
@@ -320,6 +422,14 @@ impl App {
                self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone()));
                tui.frame_requester().schedule_frame();
            }
            KeyEvent {
                code: KeyCode::Char('s'),
                modifiers: crossterm::event::KeyModifiers::CONTROL,
                kind: KeyEventKind::Press,
                ..
            } => {
                self.persist_model_shortcut().await;
            }
            // Esc primes/advances backtracking only in normal (not working) mode
            // with an empty composer. In any other state, forward Esc so the
            // active UI (e.g. status indicator, modals, popups) handles it.

@@ -1207,7 +1207,7 @@ impl ChatWidget {
        self.bottom_pane.show_selection_view(
            "Select model and reasoning level".to_string(),
            Some("Switch between OpenAI models for this and future Codex CLI session".to_string()),
            Some("Press Enter to confirm or Esc to go back".to_string()),
            Some("Press Enter to confirm, Esc to go back, Ctrl+S to save".to_string()),
            items,
        );
    }
@@ -1273,6 +1273,16 @@ impl ChatWidget {
        self.config.model = model;
    }

    pub(crate) fn add_info_message(&mut self, message: String) {
        self.add_to_history(history_cell::new_info_event(message));
        self.request_redraw();
    }

    pub(crate) fn add_error_message(&mut self, message: String) {
        self.add_to_history(history_cell::new_error_event(message));
        self.request_redraw();
    }

    pub(crate) fn add_mcp_output(&mut self) {
        if self.config.mcp_servers.is_empty() {
            self.add_to_history(history_cell::empty_mcp_output());

@@ -1052,6 +1052,12 @@ pub(crate) fn new_mcp_tools_output(
    PlainHistoryCell { lines }
}

pub(crate) fn new_info_event(message: String) -> PlainHistoryCell {
    let lines: Vec<Line<'static>> =
        vec![vec![padded_emoji("💾").green(), " ".into(), message.into()].into()];
    PlainHistoryCell { lines }
}

pub(crate) fn new_error_event(message: String) -> PlainHistoryCell {
    // Use a hair space (U+200A) to create a subtle, near-invisible separation
    // before the text. VS16 is intentionally omitted to keep spacing tighter

@@ -11,8 +11,10 @@ use codex_core::RolloutRecorder;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::GPT5_HIGH_MODEL;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config::persist_model_selection;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_ollama::DEFAULT_OSS_MODEL;
@@ -47,6 +49,7 @@ pub mod live_wrap;
mod markdown;
mod markdown_render;
mod markdown_stream;
mod new_model_popup;
pub mod onboarding;
mod pager_overlay;
mod render;
@@ -65,12 +68,14 @@ mod wrapping;
#[cfg(not(debug_assertions))]
mod updates;

pub use cli::Cli;

use crate::new_model_popup::ModelUpgradeDecision;
use crate::new_model_popup::run_model_upgrade_popup;
use crate::onboarding::TrustDirectorySelection;
use crate::onboarding::onboarding_screen::OnboardingScreenArgs;
use crate::onboarding::onboarding_screen::run_onboarding_app;
use crate::tui::Tui;
pub use cli::Cli;
use codex_core::internal_storage::InternalStorage;

// (tests access modules directly within the crate)

@@ -174,14 +179,21 @@ pub async fn run_main(
        }
    };

    let cli_profile_override = cli.config_profile.clone();
    let active_profile = cli_profile_override
        .clone()
        .or_else(|| config_toml.profile.clone());

    let should_show_trust_screen = determine_repo_trust_state(
        &mut config,
        &config_toml,
        approval_policy,
        sandbox_mode,
        cli.config_profile.clone(),
        cli_profile_override,
    )?;

    let internal_storage = InternalStorage::load(&config.codex_home);

    let log_dir = codex_core::config::log_dir(&config)?;
    std::fs::create_dir_all(&log_dir)?;
    // Open (or create) your log file, appending to it.
@@ -224,14 +236,22 @@ pub async fn run_main(

    let _ = tracing_subscriber::registry().with(file_layer).try_init();

    run_ratatui_app(cli, config, should_show_trust_screen)
        .await
        .map_err(|err| std::io::Error::other(err.to_string()))
    run_ratatui_app(
        cli,
        config,
        internal_storage,
        active_profile,
        should_show_trust_screen,
    )
    .await
    .map_err(|err| std::io::Error::other(err.to_string()))
}

async fn run_ratatui_app(
    cli: Cli,
    config: Config,
    mut internal_storage: InternalStorage,
    active_profile: Option<String>,
    should_show_trust_screen: bool,
) -> color_eyre::Result<codex_core::protocol::TokenUsage> {
    let mut config = config;
@@ -300,14 +320,6 @@ async fn run_ratatui_app(
    // Initialize high-fidelity session event logging if enabled.
    session_log::maybe_init(&config);

    let Cli {
        prompt,
        images,
        resume,
        r#continue,
        ..
    } = cli;

    let auth_manager = AuthManager::shared(config.codex_home.clone());
    let login_status = get_login_status(&config);
    let should_show_onboarding =
@@ -330,7 +342,7 @@ async fn run_ratatui_app(
        }
    }

    let resume_selection = if r#continue {
    let resume_selection = if cli.r#continue {
        match RolloutRecorder::list_conversations(&config.codex_home, 1, None).await {
            Ok(page) => page
                .items
@@ -339,7 +351,7 @@ async fn run_ratatui_app(
                .unwrap_or(resume_picker::ResumeSelection::StartFresh),
            Err(_) => resume_picker::ResumeSelection::StartFresh,
        }
    } else if resume {
    } else if cli.resume {
        match resume_picker::run_resume_picker(&mut tui, &config.codex_home).await? {
            resume_picker::ResumeSelection::Exit => {
                restore();
@@ -352,10 +364,42 @@ async fn run_ratatui_app(
        resume_picker::ResumeSelection::StartFresh
    };

    if should_show_model_rollout_prompt(
        &cli,
        &config,
        active_profile.as_deref(),
        internal_storage.gpt_5_high_model_prompt_seen,
    ) {
        internal_storage.gpt_5_high_model_prompt_seen = true;
        if let Err(e) = internal_storage.persist().await {
            error!("Failed to persist internal storage: {e:?}");
        }

        let upgrade_decision = run_model_upgrade_popup(&mut tui).await?;
        let switch_to_new_model = upgrade_decision == ModelUpgradeDecision::Switch;

        if switch_to_new_model {
            config.model = GPT5_HIGH_MODEL.to_owned();
            if let Err(e) = persist_model_selection(
                &config.codex_home,
                active_profile.as_deref(),
                &config.model,
                None,
            )
            .await
            {
                error!("Failed to persist model selection: {e:?}");
            }
        }
    }

    let Cli { prompt, images, .. } = cli;

    let app_result = App::run(
        &mut tui,
        auth_manager,
        config,
        active_profile,
        prompt,
        images,
        resume_selection,
@@ -463,11 +507,44 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
    login_status == LoginStatus::NotAuthenticated
}

fn should_show_model_rollout_prompt(
    cli: &Cli,
    config: &Config,
    active_profile: Option<&str>,
    gpt_5_high_model_prompt_seen: bool,
) -> bool {
    // TODO(jif) drop.
    let debug_high_enabled = std::env::var("DEBUG_HIGH")
        .map(|v| v.eq_ignore_ascii_case("1"))
        .unwrap_or(false);

    active_profile.is_none()
        && debug_high_enabled
        && cli.model.is_none()
        && !gpt_5_high_model_prompt_seen
        && config.model_provider.requires_openai_auth
        && !cli.oss
}

#[cfg(test)]
mod tests {
    use super::*;
    use clap::Parser;
    use std::sync::Once;

    fn enable_debug_high_env() {
        static DEBUG_HIGH_ONCE: Once = Once::new();
        DEBUG_HIGH_ONCE.call_once(|| {
            // SAFETY: Tests run in a controlled environment and require this env variable to
            // opt into the GPT-5 High rollout prompt gating. We only set it once.
            unsafe {
                std::env::set_var("DEBUG_HIGH", "1");
            }
        });
    }

    fn make_config() -> Config {
        enable_debug_high_env();
        Config::load_from_base_config_with_overrides(
            ConfigToml::default(),
            ConfigOverrides::default(),
@@ -484,4 +561,37 @@ mod tests {
            &cfg
        ));
    }

    #[test]
    fn shows_model_rollout_prompt_for_default_model() {
        let cli = Cli::parse_from(["codex"]);
        let cfg = make_config();
        assert!(should_show_model_rollout_prompt(&cli, &cfg, None, false));
    }

    #[test]
    fn hides_model_rollout_prompt_when_marked_seen() {
        let cli = Cli::parse_from(["codex"]);
        let cfg = make_config();
        assert!(!should_show_model_rollout_prompt(&cli, &cfg, None, true));
    }

    #[test]
    fn hides_model_rollout_prompt_when_cli_overrides_model() {
        let cli = Cli::parse_from(["codex", "--model", "gpt-4.1"]);
        let cfg = make_config();
        assert!(!should_show_model_rollout_prompt(&cli, &cfg, None, false));
    }

    #[test]
    fn hides_model_rollout_prompt_when_profile_active() {
        let cli = Cli::parse_from(["codex"]);
        let cfg = make_config();
        assert!(!should_show_model_rollout_prompt(
            &cli,
            &cfg,
            Some("gpt5"),
            false,
        ));
    }
}

codex-rs/tui/src/new_model_popup.rs (new file, 155 lines)
@@ -0,0 +1,155 @@
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use codex_core::config::GPT5_HIGH_MODEL;
use color_eyre::eyre::Result;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Widget;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Clear;
use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use tokio_stream::StreamExt;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum ModelUpgradeDecision {
    Switch,
    KeepCurrent,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ModelUpgradeOption {
    TryNewModel,
    KeepCurrent,
}

struct ModelUpgradePopup {
    highlighted: ModelUpgradeOption,
    decision: Option<ModelUpgradeDecision>,
    request_frame: FrameRequester,
}

impl ModelUpgradePopup {
    fn new(request_frame: FrameRequester) -> Self {
        Self {
            highlighted: ModelUpgradeOption::TryNewModel,
            decision: None,
            request_frame,
        }
    }

    fn handle_key_event(&mut self, key_event: KeyEvent) {
        match key_event.code {
            KeyCode::Up | KeyCode::Char('k') => self.highlight(ModelUpgradeOption::TryNewModel),
            KeyCode::Down | KeyCode::Char('j') => self.highlight(ModelUpgradeOption::KeepCurrent),
            KeyCode::Char('1') => self.select(ModelUpgradeOption::TryNewModel),
            KeyCode::Char('2') => self.select(ModelUpgradeOption::KeepCurrent),
            KeyCode::Enter => self.select(self.highlighted),
            KeyCode::Esc => self.select(ModelUpgradeOption::KeepCurrent),
            _ => {}
        }
    }

    fn highlight(&mut self, option: ModelUpgradeOption) {
        if self.highlighted != option {
            self.highlighted = option;
            self.request_frame.schedule_frame();
        }
    }

    fn select(&mut self, option: ModelUpgradeOption) {
        self.decision = Some(option.into());
        self.request_frame.schedule_frame();
    }
}

impl From<ModelUpgradeOption> for ModelUpgradeDecision {
    fn from(option: ModelUpgradeOption) -> Self {
        match option {
            ModelUpgradeOption::TryNewModel => ModelUpgradeDecision::Switch,
            ModelUpgradeOption::KeepCurrent => ModelUpgradeDecision::KeepCurrent,
        }
    }
}

impl WidgetRef for &ModelUpgradePopup {
    fn render_ref(&self, area: Rect, buf: &mut Buffer) {
        Clear.render(area, buf);

        let mut lines: Vec<Line> = vec![
            Line::from(vec![
                "> ".into(),
                format!("Try {GPT5_HIGH_MODEL} as your default model").bold(),
            ]),
            format!(" {GPT5_HIGH_MODEL} is our latest model tuned for coding workflows.").into(),
            " Switch now or keep your current default – you can change models any time.".into(),
            "".into(),
        ];

        let create_option =
            |index: usize, option: ModelUpgradeOption, text: &str| -> Line<'static> {
                if self.highlighted == option {
                    Line::from(vec![
                        format!("> {}. ", index + 1).cyan(),
                        text.to_owned().cyan(),
                    ])
                } else {
                    format!(" {}. {text}", index + 1).into()
                }
            };

        lines.push(create_option(
            0,
            ModelUpgradeOption::TryNewModel,
            &format!("Yes, switch me to {GPT5_HIGH_MODEL}"),
        ));
        lines.push(create_option(
            1,
            ModelUpgradeOption::KeepCurrent,
            "Not right now",
        ));
        lines.push("".into());
        lines.push(
            " Press Enter to confirm or Esc to keep your current model"
                .dim()
                .into(),
        );

        Paragraph::new(lines)
            .wrap(Wrap { trim: false })
            .render(area, buf);
    }
}

pub(crate) async fn run_model_upgrade_popup(tui: &mut Tui) -> Result<ModelUpgradeDecision> {
    let mut popup = ModelUpgradePopup::new(tui.frame_requester());

    tui.draw(u16::MAX, |frame| {
        frame.render_widget_ref(&popup, frame.area());
    })?;

    let events = tui.event_stream();
    tokio::pin!(events);
    while popup.decision.is_none() {
        if let Some(event) = events.next().await {
            match event {
                TuiEvent::Key(key_event) => popup.handle_key_event(key_event),
                TuiEvent::Draw => {
                    let _ = tui.draw(u16::MAX, |frame| {
                        frame.render_widget_ref(&popup, frame.area());
                    });
                }
                _ => {}
            }
        } else {
            break;
        }
    }

    Ok(popup.decision.unwrap_or(ModelUpgradeDecision::KeepCurrent))
}
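
Every exit path (Enter, Esc, '1', '2') funnels through the `From<ModelUpgradeOption>` conversion, so that mapping is the one place a switch can originate. A sketch of a check that could sit in a `#[cfg(test)]` module in this file:

```rust
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn option_to_decision_mapping() {
        // Enter resolves the highlighted option; Esc and '2' both pick
        // KeepCurrent, so TryNewModel is the only path to a switch.
        assert_eq!(
            ModelUpgradeDecision::from(ModelUpgradeOption::TryNewModel),
            ModelUpgradeDecision::Switch
        );
        assert_eq!(
            ModelUpgradeDecision::from(ModelUpgradeOption::KeepCurrent),
            ModelUpgradeDecision::KeepCurrent
        );
    }
}
```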