chore: remove model upgrade popup (#4332)

Author: Thibault Sottiaux (committed by GitHub)
Date:   2025-09-27 13:25:09 -07:00
Parent: bcf2bc0aa5
Commit: d7286e9829
5 changed files with 4 additions and 454 deletions
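
Summary: this change deletes the one-time "Introducing GPT-5-Codex" upgrade popup from the TUI, together with the InternalStorage JSON store that tracked whether the prompt had been seen and the should_show_model_rollout_prompt gate plus its tests. AsciiAnimation::request_frame is kept but marked #[allow(dead_code)] now that the popup no longer calls it.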

File 1 of 5 (deleted): the InternalStorage JSON store

@@ -1,89 +0,0 @@
use anyhow::Context;
use serde::Deserialize;
use serde::Serialize;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
pub(crate) const INTERNAL_STORAGE_FILE: &str = "internal_storage.json";
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InternalStorage {
#[serde(skip)]
storage_path: PathBuf,
#[serde(default = "default_gpt_5_codex_model_prompt_seen")]
pub gpt_5_codex_model_prompt_seen: bool,
}
const fn default_gpt_5_codex_model_prompt_seen() -> bool {
true
}
impl Default for InternalStorage {
fn default() -> Self {
Self {
storage_path: PathBuf::new(),
gpt_5_codex_model_prompt_seen: default_gpt_5_codex_model_prompt_seen(),
}
}
}
// TODO(jif) generalise all the file writers and build proper async channel inserters.
impl InternalStorage {
pub fn load(codex_home: &Path) -> Self {
let storage_path = codex_home.join(INTERNAL_STORAGE_FILE);
match std::fs::read_to_string(&storage_path) {
Ok(serialized) => match serde_json::from_str::<Self>(&serialized) {
Ok(mut storage) => {
storage.storage_path = storage_path;
storage
}
Err(error) => {
tracing::warn!("failed to parse internal storage: {error:?}");
Self::empty(storage_path)
}
},
Err(error) => {
if error.kind() == ErrorKind::NotFound {
tracing::debug!(
"internal storage not found at {}; initializing defaults",
storage_path.display()
);
} else {
tracing::warn!("failed to read internal storage: {error:?}");
}
Self::empty(storage_path)
}
}
}
fn empty(storage_path: PathBuf) -> Self {
Self {
storage_path,
..Default::default()
}
}
pub async fn persist(&self) -> anyhow::Result<()> {
let serialized = serde_json::to_string_pretty(self)?;
if let Some(parent) = self.storage_path.parent() {
tokio::fs::create_dir_all(parent).await.with_context(|| {
format!(
"failed to create internal storage directory at {}",
parent.display()
)
})?;
}
tokio::fs::write(&self.storage_path, serialized)
.await
.with_context(|| {
format!(
"failed to persist internal storage at {}",
self.storage_path.display()
)
})
}
}
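
Note: in the removed store above, the serde(default = "default_gpt_5_codex_model_prompt_seen") attribute makes a missing field deserialize to true, i.e. "already seen", so a fresh or incomplete internal_storage.json never re-triggered the popup; only a file that explicitly recorded false did. A minimal standalone sketch of that serde behavior, with hypothetical names (Flags, seen, yes):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Flags {
    // Field absent from the JSON? serde calls yes() instead of failing.
    #[serde(default = "yes")]
    seen: bool,
}

const fn yes() -> bool {
    true
}

fn main() {
    let flags: Flags = serde_json::from_str("{}").expect("valid JSON");
    assert!(flags.seen); // missing field defaulted to true
}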

File 2 of 5: codex_core module declarations

@@ -29,7 +29,6 @@ mod exec_command;
pub mod exec_env;
mod flags;
pub mod git_info;
-pub mod internal_storage;
pub mod landlock;
mod mcp_connection_manager;
mod mcp_tool_call;

File 3 of 5: AsciiAnimation

@@ -90,6 +90,7 @@ impl AsciiAnimation {
true
}
+#[allow(dead_code)]
pub(crate) fn request_frame(&self) {
self.request_frame.schedule_frame();
}
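
Note: #[allow(dead_code)] is the addition in this hunk (the header counts 6 lines before, 7 after). With the popup removed, request_frame apparently loses its only caller, so the method is kept but exempted from the dead-code lint rather than deleted.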

File 4 of 5: the TUI entry point (run_main / run_ratatui_app)

@@ -12,10 +12,8 @@ use codex_core::RolloutRecorder;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
-use codex_core::config::GPT_5_CODEX_MEDIUM_MODEL;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
-use codex_core::config::persist_model_selection;
use codex_core::find_conversation_path_by_id_str;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
@@ -54,7 +52,6 @@ pub mod live_wrap;
mod markdown;
mod markdown_render;
mod markdown_stream;
-mod new_model_popup;
pub mod onboarding;
mod pager_overlay;
mod render;
@@ -79,14 +76,11 @@ pub mod test_backend;
#[cfg(not(debug_assertions))]
mod updates;
-use crate::new_model_popup::ModelUpgradeDecision;
-use crate::new_model_popup::run_model_upgrade_popup;
use crate::onboarding::TrustDirectorySelection;
use crate::onboarding::onboarding_screen::OnboardingScreenArgs;
use crate::onboarding::onboarding_screen::run_onboarding_app;
use crate::tui::Tui;
pub use cli::Cli;
-use codex_core::internal_storage::InternalStorage;
// (tests access modules directly within the crate)
@@ -204,8 +198,6 @@ pub async fn run_main(
cli_profile_override,
)?;
-let internal_storage = InternalStorage::load(&config.codex_home);
let log_dir = codex_core::config::log_dir(&config)?;
std::fs::create_dir_all(&log_dir)?;
// Open (or create) your log file, appending to it.
@@ -249,21 +241,14 @@ pub async fn run_main(
let _ = tracing_subscriber::registry().with(file_layer).try_init();
-run_ratatui_app(
-cli,
-config,
-internal_storage,
-active_profile,
-should_show_trust_screen,
-)
-.await
-.map_err(|err| std::io::Error::other(err.to_string()))
+run_ratatui_app(cli, config, active_profile, should_show_trust_screen)
+.await
+.map_err(|err| std::io::Error::other(err.to_string()))
}
async fn run_ratatui_app(
cli: Cli,
config: Config,
-mut internal_storage: InternalStorage,
active_profile: Option<String>,
should_show_trust_screen: bool,
) -> color_eyre::Result<AppExitInfo> {
@@ -389,36 +374,6 @@ async fn run_ratatui_app(
resume_picker::ResumeSelection::StartFresh
};
-if should_show_model_rollout_prompt(
-&cli,
-&config,
-active_profile.as_deref(),
-internal_storage.gpt_5_codex_model_prompt_seen,
-) {
-internal_storage.gpt_5_codex_model_prompt_seen = true;
-if let Err(e) = internal_storage.persist().await {
-error!("Failed to persist internal storage: {e:?}");
-}
-let upgrade_decision = run_model_upgrade_popup(&mut tui).await?;
-let switch_to_new_model = upgrade_decision == ModelUpgradeDecision::Switch;
-if switch_to_new_model {
-config.model = GPT_5_CODEX_MEDIUM_MODEL.to_owned();
-config.model_reasoning_effort = None;
-if let Err(e) = persist_model_selection(
-&config.codex_home,
-active_profile.as_deref(),
-&config.model,
-config.model_reasoning_effort,
-)
-.await
-{
-error!("Failed to persist model selection: {e:?}");
-}
-}
-}
let Cli { prompt, images, .. } = cli;
let app_result = App::run(
@@ -532,140 +487,3 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
login_status == LoginStatus::NotAuthenticated
}
fn should_show_model_rollout_prompt(
cli: &Cli,
config: &Config,
active_profile: Option<&str>,
gpt_5_codex_model_prompt_seen: bool,
) -> bool {
let login_status = get_login_status(config);
active_profile.is_none()
&& cli.model.is_none()
&& !gpt_5_codex_model_prompt_seen
&& config.model_provider.requires_openai_auth
&& matches!(login_status, LoginStatus::AuthMode(AuthMode::ChatGPT))
&& !cli.oss
}
#[cfg(test)]
mod tests {
use super::*;
use clap::Parser;
use codex_core::auth::AuthDotJson;
use codex_core::auth::get_auth_file;
use codex_core::auth::login_with_api_key;
use codex_core::auth::write_auth_json;
use codex_core::token_data::IdTokenInfo;
use codex_core::token_data::TokenData;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
fn get_next_codex_home() -> PathBuf {
static NEXT_CODEX_HOME_ID: AtomicUsize = AtomicUsize::new(0);
let mut codex_home = std::env::temp_dir();
let unique_suffix = format!(
"codex_tui_test_{}_{}",
std::process::id(),
NEXT_CODEX_HOME_ID.fetch_add(1, Ordering::Relaxed)
);
codex_home.push(unique_suffix);
codex_home
}
fn make_config() -> Config {
// Create a unique CODEX_HOME per test to isolate auth.json writes.
let codex_home = get_next_codex_home();
std::fs::create_dir_all(&codex_home).expect("create unique CODEX_HOME");
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home,
)
.expect("load default config")
}
/// Test helper to write an `auth.json` with the requested auth mode into the
/// provided CODEX_HOME directory. This ensures `get_login_status()` reads the
/// intended mode deterministically.
fn set_auth_method(codex_home: &std::path::Path, mode: AuthMode) {
match mode {
AuthMode::ApiKey => {
login_with_api_key(codex_home, "sk-test-key").expect("write api key auth.json");
}
AuthMode::ChatGPT => {
// Minimal valid JWT payload: header.payload.signature (all base64url, no padding)
const FAKE_JWT: &str = "eyJhbGciOiJub25lIiwidHlwIjoiSldUIn0.e30.c2ln"; // {"alg":"none","typ":"JWT"}.{}."sig"
let mut id_info = IdTokenInfo::default();
id_info.raw_jwt = FAKE_JWT.to_string();
let auth = AuthDotJson {
openai_api_key: None,
tokens: Some(TokenData {
id_token: id_info,
access_token: "access-token".to_string(),
refresh_token: "refresh-token".to_string(),
account_id: None,
}),
last_refresh: None,
};
let file = get_auth_file(codex_home);
write_auth_json(&file, &auth).expect("write chatgpt auth.json");
}
}
}
#[test]
fn shows_login_when_not_authenticated() {
let cfg = make_config();
assert!(should_show_login_screen(
LoginStatus::NotAuthenticated,
&cfg
));
}
#[test]
fn shows_model_rollout_prompt_for_default_model() {
let cli = Cli::parse_from(["codex"]);
let cfg = make_config();
set_auth_method(&cfg.codex_home, AuthMode::ChatGPT);
assert!(should_show_model_rollout_prompt(&cli, &cfg, None, false,));
}
#[test]
fn hides_model_rollout_prompt_when_api_auth_mode() {
let cli = Cli::parse_from(["codex"]);
let cfg = make_config();
set_auth_method(&cfg.codex_home, AuthMode::ApiKey);
assert!(!should_show_model_rollout_prompt(&cli, &cfg, None, false,));
}
#[test]
fn hides_model_rollout_prompt_when_marked_seen() {
let cli = Cli::parse_from(["codex"]);
let cfg = make_config();
set_auth_method(&cfg.codex_home, AuthMode::ChatGPT);
assert!(!should_show_model_rollout_prompt(&cli, &cfg, None, true,));
}
#[test]
fn hides_model_rollout_prompt_when_cli_overrides_model() {
let cli = Cli::parse_from(["codex", "--model", "gpt-4.1"]);
let cfg = make_config();
set_auth_method(&cfg.codex_home, AuthMode::ChatGPT);
assert!(!should_show_model_rollout_prompt(&cli, &cfg, None, false,));
}
#[test]
fn hides_model_rollout_prompt_when_profile_active() {
let cli = Cli::parse_from(["codex"]);
let cfg = make_config();
set_auth_method(&cfg.codex_home, AuthMode::ChatGPT);
assert!(!should_show_model_rollout_prompt(
&cli,
&cfg,
Some("gpt5"),
false,
));
}
}
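
Note: the removed gate fired only when all six conditions held at once: no active profile, no --model override on the CLI, the prompt not yet marked seen, a model provider that requires OpenAI auth, a ChatGPT-mode login, and no --oss flag. The removed tests covered each knob separately and isolated state by giving every test its own throwaway CODEX_HOME under the system temp dir (process id plus an atomic counter), so parallel tests never shared an auth.json.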

File 5 of 5 (deleted): the model upgrade popup

@@ -1,179 +0,0 @@
use crate::ascii_animation::AsciiAnimation;
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use color_eyre::eyre::Result;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Widget;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Clear;
use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use tokio_stream::StreamExt;
const MIN_ANIMATION_HEIGHT: u16 = 24;
const MIN_ANIMATION_WIDTH: u16 = 60;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum ModelUpgradeDecision {
Switch,
KeepCurrent,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ModelUpgradeOption {
TryNewModel,
KeepCurrent,
}
struct ModelUpgradePopup {
highlighted: ModelUpgradeOption,
decision: Option<ModelUpgradeDecision>,
animation: AsciiAnimation,
}
impl ModelUpgradePopup {
fn new(request_frame: FrameRequester) -> Self {
Self {
highlighted: ModelUpgradeOption::TryNewModel,
decision: None,
animation: AsciiAnimation::new(request_frame),
}
}
fn handle_key_event(&mut self, key_event: KeyEvent) {
match key_event.code {
KeyCode::Up | KeyCode::Char('k') => self.highlight(ModelUpgradeOption::TryNewModel),
KeyCode::Down | KeyCode::Char('j') => self.highlight(ModelUpgradeOption::KeepCurrent),
KeyCode::Char('1') => self.select(ModelUpgradeOption::TryNewModel),
KeyCode::Char('2') => self.select(ModelUpgradeOption::KeepCurrent),
KeyCode::Enter => self.select(self.highlighted),
KeyCode::Esc => self.select(ModelUpgradeOption::KeepCurrent),
KeyCode::Char('.') => {
if key_event.modifiers.contains(KeyModifiers::CONTROL) {
let _ = self.animation.pick_random_variant();
}
}
_ => {}
}
}
fn highlight(&mut self, option: ModelUpgradeOption) {
if self.highlighted != option {
self.highlighted = option;
self.animation.request_frame();
}
}
fn select(&mut self, option: ModelUpgradeOption) {
self.decision = Some(option.into());
self.animation.request_frame();
}
}
impl From<ModelUpgradeOption> for ModelUpgradeDecision {
fn from(option: ModelUpgradeOption) -> Self {
match option {
ModelUpgradeOption::TryNewModel => ModelUpgradeDecision::Switch,
ModelUpgradeOption::KeepCurrent => ModelUpgradeDecision::KeepCurrent,
}
}
}
impl WidgetRef for &ModelUpgradePopup {
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
Clear.render(area, buf);
self.animation.schedule_next_frame();
// Skip the animation entirely when the viewport is too small so we don't clip frames.
let show_animation =
area.height >= MIN_ANIMATION_HEIGHT && area.width >= MIN_ANIMATION_WIDTH;
let mut lines: Vec<Line> = Vec::new();
if show_animation {
let frame = self.animation.current_frame();
lines.extend(frame.lines().map(Into::into));
// Spacer between animation and text content.
lines.push("".into());
}
lines.push(Line::from(vec![
" ".into(),
"Introducing GPT-5-Codex".bold(),
]));
lines.push("".into());
lines.push(
" GPT-5-Codex works faster through easy tasks and harder on complex tasks,".into(),
);
lines.push(" improves on code quality, and is more steerable with AGENTS.md.".into());
lines.push("".into());
let create_option =
|index: usize, option: ModelUpgradeOption, text: &str| -> Line<'static> {
if self.highlighted == option {
Line::from(vec![
format!("> {}. ", index + 1).cyan(),
text.to_owned().cyan(),
])
} else {
format!(" {}. {text}", index + 1).into()
}
};
lines.push(create_option(
0,
ModelUpgradeOption::TryNewModel,
"Try the new GPT-5-Codex model",
));
lines.push("".into());
lines.push(create_option(
1,
ModelUpgradeOption::KeepCurrent,
"Continue using current model",
));
lines.push("".into());
lines.push(
" Press Enter to confirm or Esc to keep your current model"
.dim()
.into(),
);
Paragraph::new(lines)
.wrap(Wrap { trim: false })
.render(area, buf);
}
}
pub(crate) async fn run_model_upgrade_popup(tui: &mut Tui) -> Result<ModelUpgradeDecision> {
let mut popup = ModelUpgradePopup::new(tui.frame_requester());
tui.draw(u16::MAX, |frame| {
frame.render_widget_ref(&popup, frame.area());
})?;
let events = tui.event_stream();
tokio::pin!(events);
while popup.decision.is_none() {
if let Some(event) = events.next().await {
match event {
TuiEvent::Key(key_event) => popup.handle_key_event(key_event),
TuiEvent::Draw => {
let _ = tui.draw(u16::MAX, |frame| {
frame.render_widget_ref(&popup, frame.area());
});
}
_ => {}
}
} else {
break;
}
}
Ok(popup.decision.unwrap_or(ModelUpgradeDecision::KeepCurrent))
}
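
Note: the popup above ran its own small event loop: draw once, then wait on the TUI event stream, updating the highlight on key events and redrawing on TuiEvent::Draw until a decision lands, with KeepCurrent as the fallback when the stream closes. A minimal standalone sketch of that loop shape, with stand-in types (Ev and Decision are hypothetical; the real code consumes Tui::event_stream()):

use tokio_stream::StreamExt;

#[derive(Debug, Clone, Copy, PartialEq)]
enum Ev {
    Draw,
    Up,
    Down,
    Enter,
    Esc,
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum Decision {
    Switch,
    KeepCurrent,
}

#[tokio::main]
async fn main() {
    // Stand-in for tui.event_stream(); the real stream yields TuiEvent.
    let events = tokio_stream::iter([Ev::Draw, Ev::Down, Ev::Enter]);
    tokio::pin!(events);

    let mut highlighted = Decision::Switch;
    let mut decision: Option<Decision> = None;
    while decision.is_none() {
        match events.next().await {
            Some(Ev::Up) => highlighted = Decision::Switch,
            Some(Ev::Down) => highlighted = Decision::KeepCurrent,
            Some(Ev::Enter) => decision = Some(highlighted),
            Some(Ev::Esc) => decision = Some(Decision::KeepCurrent),
            Some(Ev::Draw) => { /* a real loop would redraw here */ }
            None => break, // stream closed without a choice
        }
    }
    // Mirrors popup.decision.unwrap_or(ModelUpgradeDecision::KeepCurrent).
    assert_eq!(decision.unwrap_or(Decision::KeepCurrent), Decision::KeepCurrent);
}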