Show login options when not signed in with ChatGPT (#2440)
Motivation: some users end up using their API key even though they want to use their ChatGPT account, and we want to give them the chance to always log in with that account. This PR displays the login options when the user is not signed in with ChatGPT: even if you have set an OpenAI API key as an environment variable, you will still be prompted to log in with ChatGPT. We have also added a new config option, `preferred_auth_method` (default `"chatgpt"`); setting it to `"apikey"` ensures you are never asked to log in with ChatGPT and Codex always defaults to using your API key.

Demo: https://github.com/user-attachments/assets/b61ebfa9-3c5e-4ab7-bf94-395c23a0e0af

After ChatGPT sign-in: https://github.com/user-attachments/assets/d58b366b-c46a-428f-a22f-2ac230f991c0
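For reference, a minimal sketch of the API-key opt-out described above, mirroring the README changes in the diff below:

```toml
# ~/.codex/config.toml
# Default is "chatgpt"; set "apikey" to skip the ChatGPT login prompt
# and always authenticate with your OpenAI API key.
preferred_auth_method = "apikey"
```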
@@ -22,6 +22,7 @@
- [Authenticate locally and copy your credentials to the "headless" machine](#authenticate-locally-and-copy-your-credentials-to-the-headless-machine)
- [Connecting through VPS or remote](#connecting-through-vps-or-remote)
- [Usage-based billing alternative: Use an OpenAI API key](#usage-based-billing-alternative-use-an-openai-api-key)
- [Forcing a specific auth method (advanced)](#forcing-a-specific-auth-method-advanced)
- [Choosing Codex's level of autonomy](#choosing-codexs-level-of-autonomy)
- [**1. Read/write**](#1-readwrite)
- [**2. Read-only**](#2-read-only)
@@ -165,6 +166,35 @@ Notes:
- This command only sets the key for your current terminal session, which we recommend. To set it for all future sessions, you can also add the `export` line to your shell's configuration file (e.g., `~/.zshrc`).
- If you have signed in with ChatGPT, Codex will default to using your ChatGPT credits. If you wish to use your API key, use the `/logout` command to clear your ChatGPT authentication.

#### Forcing a specific auth method (advanced)

You can explicitly choose which authentication Codex should prefer when both are available.

- To always use your API key (even when ChatGPT auth exists), set:

```toml
# ~/.codex/config.toml
preferred_auth_method = "apikey"
```

Or override ad-hoc via CLI:

```bash
codex --config preferred_auth_method="apikey"
```

- To prefer ChatGPT auth (default), set:

```toml
# ~/.codex/config.toml
preferred_auth_method = "chatgpt"
```

Notes:

- When `preferred_auth_method = "apikey"` and an API key is available, the login screen is skipped.
- When `preferred_auth_method = "chatgpt"` (default), Codex prefers ChatGPT auth if present; if only an API key is present, it will use the API key. Certain account types may also require API-key mode.

### Choosing Codex's level of autonomy

We always recommend running Codex in its default sandbox that gives you strong guardrails around what the agent can do. The default sandbox prevents it from editing files outside its workspace, or from accessing the network.

@@ -1,3 +1,4 @@
use codex_login::AuthMode;
use codex_login::CodexAuth;
use std::path::Path;
use std::sync::LazyLock;
@@ -19,7 +20,7 @@ pub fn set_chatgpt_token_data(value: TokenData) {

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
let auth = CodexAuth::from_codex_home(codex_home)?;
let auth = CodexAuth::from_codex_home(codex_home, AuthMode::ChatGPT)?;
if let Some(auth) = auth {
let token_data = auth.get_token_data().await?;
set_chatgpt_token_data(token_data);

@@ -60,7 +60,7 @@ pub async fn run_login_with_api_key(
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
let config = load_config_or_exit(cli_config_overrides);

match CodexAuth::from_codex_home(&config.codex_home) {
match CodexAuth::from_codex_home(&config.codex_home, config.preferred_auth_method) {
Ok(Some(auth)) => match auth.mode {
AuthMode::ApiKey => match auth.get_token().await {
Ok(api_key) => {

@@ -13,6 +13,7 @@ use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use codex_login::AuthMode;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -163,6 +164,9 @@ pub struct Config {

/// The value for the `originator` header included with Responses API requests.
pub internal_originator: Option<String>,

/// Preferred authentication method when both ChatGPT and API key credentials are available.
pub preferred_auth_method: AuthMode,
}

impl Config {
@@ -409,6 +413,9 @@ pub struct ConfigToml {
pub internal_originator: Option<String>,

pub projects: Option<HashMap<String, ProjectConfig>>,

/// Preferred authentication method when both ChatGPT and API key credentials are available.
pub preferred_auth_method: Option<AuthMode>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
@@ -672,6 +679,7 @@ impl Config {
include_plan_tool: include_plan_tool.unwrap_or(false),
include_apply_patch_tool: include_apply_patch_tool_val,
internal_originator: cfg.internal_originator,
preferred_auth_method: cfg.preferred_auth_method.unwrap_or(AuthMode::ChatGPT),
};
Ok(config)
}
@@ -1036,6 +1044,7 @@ disable_response_storage = true
include_plan_tool: false,
include_apply_patch_tool: false,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
},
o3_profile_config
);
@@ -1088,6 +1097,7 @@ disable_response_storage = true
include_plan_tool: false,
include_apply_patch_tool: false,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
};

assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);
@@ -1155,6 +1165,7 @@ disable_response_storage = true
include_plan_tool: false,
include_apply_patch_tool: false,
internal_originator: None,
preferred_auth_method: AuthMode::ChatGPT,
};

assert_eq!(expected_zdr_profile_config, zdr_profile_config);

@@ -40,7 +40,7 @@ impl Default for ConversationManager {

impl ConversationManager {
pub async fn new_conversation(&self, config: Config) -> CodexResult<NewConversation> {
let auth = CodexAuth::from_codex_home(&config.codex_home)?;
let auth = CodexAuth::from_codex_home(&config.codex_home, config.preferred_auth_method)?;
self.new_conversation_with_auth(config, auth).await
}

@@ -7,6 +7,7 @@ use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id;
@@ -54,6 +55,59 @@ fn assert_message_ends_with(request_body: &serde_json::Value, text: &str) {
);
}

/// Writes an `auth.json` into the provided `codex_home` with the specified parameters.
/// Returns the fake JWT string written to `tokens.id_token`.
#[expect(clippy::unwrap_used)]
fn write_auth_json(
codex_home: &TempDir,
openai_api_key: Option<&str>,
chatgpt_plan_type: &str,
access_token: &str,
account_id: Option<&str>,
) -> String {
use base64::Engine as _;
use serde_json::json;

let header = json!({ "alg": "none", "typ": "JWT" });
let payload = json!({
"email": "user@example.com",
"https://api.openai.com/auth": {
"chatgpt_plan_type": chatgpt_plan_type,
"chatgpt_account_id": account_id.unwrap_or("acc-123")
}
});

let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
let header_b64 = b64(&serde_json::to_vec(&header).unwrap());
let payload_b64 = b64(&serde_json::to_vec(&payload).unwrap());
let signature_b64 = b64(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");

let mut tokens = json!({
"id_token": fake_jwt,
"access_token": access_token,
"refresh_token": "refresh-test",
});
if let Some(acc) = account_id {
tokens["account_id"] = json!(acc);
}

let auth_json = json!({
"OPENAI_API_KEY": openai_api_key,
"tokens": tokens,
// RFC3339 datetime; value doesn't matter for these tests
"last_refresh": "2025-08-06T20:41:36.232376Z",
});

std::fs::write(
codex_home.path().join("auth.json"),
serde_json::to_string_pretty(&auth_json).unwrap(),
)
.unwrap();

fake_jwt
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_session_id_and_model_headers_in_request() {
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
@@ -311,6 +365,156 @@ async fn chatgpt_auth_sends_correct_request() {
);
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn prefers_chatgpt_token_when_config_prefers_chatgpt() {
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
println!(
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
);
return;
}

// Mock server
let server = MockServer::start().await;

let first = ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(sse_completed("resp1"), "text/event-stream");

// Expect ChatGPT base path and correct headers
Mock::given(method("POST"))
.and(path("/v1/responses"))
.and(header_regex("Authorization", r"Bearer Access-123"))
.and(header_regex("chatgpt-account-id", r"acc-123"))
.respond_with(first)
.expect(1)
.mount(&server)
.await;

let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};

// Init session
let codex_home = TempDir::new().unwrap();
// Write auth.json that contains both API key and ChatGPT tokens for a plan that should prefer ChatGPT.
let _jwt = write_auth_json(
&codex_home,
Some("sk-test-key"),
"pro",
"Access-123",
Some("acc-123"),
);

let mut config = load_default_config_for_test(&codex_home);
config.model_provider = model_provider;
config.preferred_auth_method = AuthMode::ChatGPT;

let conversation_manager = ConversationManager::default();
let NewConversation {
conversation: codex,
..
} = conversation_manager
.new_conversation(config)
.await
.expect("create new conversation");

codex
.submit(Op::UserInput {
items: vec![InputItem::Text {
text: "hello".into(),
}],
})
.await
.unwrap();

wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

// verify request body flags
let request = &server.received_requests().await.unwrap()[0];
let request_body = request.body_json::<serde_json::Value>().unwrap();
assert!(
!request_body["store"].as_bool().unwrap(),
"store should be false for ChatGPT auth"
);
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
println!(
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
);
return;
}

// Mock server
let server = MockServer::start().await;

let first = ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(sse_completed("resp1"), "text/event-stream");

// Expect API key header, no ChatGPT account header required.
Mock::given(method("POST"))
.and(path("/v1/responses"))
.and(header_regex("Authorization", r"Bearer sk-test-key"))
.respond_with(first)
.expect(1)
.mount(&server)
.await;

let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};

// Init session
let codex_home = TempDir::new().unwrap();
// Write auth.json that contains both API key and ChatGPT tokens for a plan that should prefer ChatGPT,
// but config will force API key preference.
let _jwt = write_auth_json(
&codex_home,
Some("sk-test-key"),
"pro",
"Access-123",
Some("acc-123"),
);

let mut config = load_default_config_for_test(&codex_home);
config.model_provider = model_provider;
config.preferred_auth_method = AuthMode::ApiKey;

let conversation_manager = ConversationManager::default();
let NewConversation {
conversation: codex,
..
} = conversation_manager
.new_conversation(config)
.await
.expect("create new conversation");

codex
.submit(Op::UserInput {
items: vec![InputItem::Text {
text: "hello".into(),
}],
})
.await
.unwrap();

wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

// verify request body flags
let request = &server.received_requests().await.unwrap()[0];
let request_body = request.body_json::<serde_json::Value>().unwrap();
assert!(
request_body["store"].as_bool().unwrap(),
"store should be true for API key auth"
);
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_user_instructions_message_in_request() {
let server = MockServer::start().await;

@@ -30,7 +30,8 @@ mod token_data;
pub const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";
pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";

#[derive(Clone, Debug, PartialEq, Copy)]
#[derive(Clone, Debug, PartialEq, Copy, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
ApiKey,
ChatGPT,
@@ -63,8 +64,11 @@ impl CodexAuth {

/// Loads the available auth information from the auth.json or
/// OPENAI_API_KEY environment variable.
pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
load_auth(codex_home, true)
pub fn from_codex_home(
codex_home: &Path,
preferred_auth_method: AuthMode,
) -> std::io::Result<Option<CodexAuth>> {
load_auth(codex_home, true, preferred_auth_method)
}

pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
@@ -165,7 +169,11 @@ impl CodexAuth {
}
}

fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
fn load_auth(
codex_home: &Path,
include_env_var: bool,
preferred_auth_method: AuthMode,
) -> std::io::Result<Option<CodexAuth>> {
// First, check to see if there is a valid auth.json file. If not, we fall
// back to AuthMode::ApiKey using the OPENAI_API_KEY environment variable
// (if it is set).
@@ -201,7 +209,7 @@ fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option
// "refreshable" even if we are using the API key for auth?
match &tokens {
Some(tokens) => {
if tokens.is_plan_that_should_use_api_key() {
if tokens.should_use_api_key(preferred_auth_method) {
return Ok(Some(CodexAuth::from_api_key(api_key)));
} else {
// Ignore the API key and fall through to ChatGPT auth.
@@ -383,7 +391,9 @@ mod tests {
fn writes_api_key_and_loads_auth() {
let dir = tempdir().unwrap();
login_with_api_key(dir.path(), "sk-test-key").unwrap();
let auth = load_auth(dir.path(), false).unwrap().unwrap();
let auth = load_auth(dir.path(), false, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(auth.mode, AuthMode::ApiKey);
assert_eq!(auth.api_key.as_deref(), Some("sk-test-key"));
}
@@ -395,7 +405,9 @@ mod tests {
let env_var = std::env::var(OPENAI_API_KEY_ENV_VAR);

if let Ok(env_var) = env_var {
let auth = load_auth(dir.path(), true).unwrap().unwrap();
let auth = load_auth(dir.path(), true, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(auth.mode, AuthMode::ApiKey);
assert_eq!(auth.api_key, Some(env_var));
}
@@ -438,7 +450,9 @@ mod tests {
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
} = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);

@@ -487,7 +501,9 @@ mod tests {
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
} = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(None, api_key);
assert_eq!(AuthMode::ChatGPT, mode);

@@ -535,7 +551,9 @@ mod tests {
mode,
auth_dot_json,
auth_file: _,
} = load_auth(codex_home.path(), false).unwrap().unwrap();
} = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(Some("sk-test-key".to_string()), api_key);
assert_eq!(AuthMode::ApiKey, mode);

@@ -624,7 +642,9 @@ mod tests {
)
.unwrap();

let auth = load_auth(dir.path(), false).unwrap().unwrap();
let auth = load_auth(dir.path(), false, AuthMode::ChatGPT)
.unwrap()
.unwrap();
assert_eq!(auth.mode, AuthMode::ApiKey);
assert_eq!(auth.api_key, Some("sk-test-key".to_string()));

@@ -3,6 +3,8 @@ use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;

use crate::AuthMode;

#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
pub struct TokenData {
/// Flat info parsed from the JWT in auth.json.
@@ -23,7 +25,11 @@ pub struct TokenData {
impl TokenData {
/// Returns true if this is a plan that should use the traditional
/// "metered" billing via an API key.
pub(crate) fn is_plan_that_should_use_api_key(&self) -> bool {
pub(crate) fn should_use_api_key(&self, preferred_auth_method: AuthMode) -> bool {
if preferred_auth_method == AuthMode::ApiKey {
return true;
}

self.id_token
.chatgpt_plan_type
.as_ref()

@@ -1,12 +1,13 @@
use crate::LoginStatus;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::chatwidget::ChatWidget;
use crate::file_search::FileSearchManager;
use crate::get_git_diff::get_git_diff;
use crate::get_login_status;
use crate::onboarding::onboarding_screen::KeyboardHandler;
use crate::onboarding::onboarding_screen::OnboardingScreen;
use crate::onboarding::onboarding_screen::OnboardingScreenArgs;
use crate::should_show_login_screen;
use crate::slash_command::SlashCommand;
use crate::tui;
use codex_core::ConversationManager;
@@ -137,8 +138,11 @@ impl App<'_> {
});
}

let show_login_screen = should_show_login_screen(&config);
let app_state = if show_login_screen || show_trust_screen {
let login_status = get_login_status(&config);
let should_show_onboarding =
should_show_onboarding(login_status, &config, show_trust_screen);
let app_state = if should_show_onboarding {
let show_login_screen = should_show_login_screen(login_status, &config);
let chat_widget_args = ChatWidgetArgs {
config: config.clone(),
initial_prompt,
@@ -150,9 +154,10 @@ impl App<'_> {
event_tx: app_event_tx.clone(),
codex_home: config.codex_home.clone(),
cwd: config.cwd.clone(),
show_login_screen,
show_trust_screen,
show_login_screen,
chat_widget_args,
login_status,
}),
}
} else {
@@ -613,3 +618,77 @@ impl App<'_> {
}
}
}

fn should_show_onboarding(
login_status: LoginStatus,
config: &Config,
show_trust_screen: bool,
) -> bool {
if show_trust_screen {
return true;
}

should_show_login_screen(login_status, config)
}

fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool {
match login_status {
LoginStatus::NotAuthenticated => true,
LoginStatus::AuthMode(method) => method != config.preferred_auth_method,
}
}

#[cfg(test)]
mod tests {
use super::*;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_login::AuthMode;

fn make_config(preferred: AuthMode) -> Config {
let mut cfg = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
std::env::temp_dir(),
)
.expect("load default config");
cfg.preferred_auth_method = preferred;
cfg
}

#[test]
fn shows_login_when_not_authenticated() {
let cfg = make_config(AuthMode::ChatGPT);
assert!(should_show_login_screen(
LoginStatus::NotAuthenticated,
&cfg
));
}

#[test]
fn shows_login_when_api_key_but_prefers_chatgpt() {
let cfg = make_config(AuthMode::ChatGPT);
assert!(should_show_login_screen(
LoginStatus::AuthMode(AuthMode::ApiKey),
&cfg
))
}

#[test]
fn hides_login_when_api_key_and_prefers_api_key() {
let cfg = make_config(AuthMode::ApiKey);
assert!(!should_show_login_screen(
LoginStatus::AuthMode(AuthMode::ApiKey),
&cfg
))
}

#[test]
fn hides_login_when_chatgpt_and_prefers_chatgpt() {
let cfg = make_config(AuthMode::ChatGPT);
assert!(!should_show_login_screen(
LoginStatus::AuthMode(AuthMode::ChatGPT),
&cfg
))
}
}

@@ -12,6 +12,7 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_login::AuthMode;
use codex_login::CodexAuth;
use codex_ollama::DEFAULT_OSS_MODEL;
use codex_protocol::config_types::SandboxMode;
@@ -296,21 +297,27 @@ fn restore() {
}
}

fn should_show_login_screen(config: &Config) -> bool {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LoginStatus {
AuthMode(AuthMode),
NotAuthenticated,
}

fn get_login_status(config: &Config) -> LoginStatus {
if config.model_provider.requires_openai_auth {
// Reading the OpenAI API key is an async operation because it may need
// to refresh the token. Block on it.
let codex_home = config.codex_home.clone();
match CodexAuth::from_codex_home(&codex_home) {
Ok(Some(_)) => false,
Ok(None) => true,
match CodexAuth::from_codex_home(&codex_home, config.preferred_auth_method) {
Ok(Some(auth)) => LoginStatus::AuthMode(auth.mode),
Ok(None) => LoginStatus::NotAuthenticated,
Err(err) => {
error!("Failed to read auth.json: {err}");
true
LoginStatus::NotAuthenticated
}
}
} else {
false
LoginStatus::NotAuthenticated
}
}

@@ -19,6 +19,7 @@ use ratatui::widgets::Wrap;

use codex_login::AuthMode;

use crate::LoginStatus;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::onboarding::onboarding_screen::KeyboardHandler;
@@ -96,6 +97,8 @@ pub(crate) struct AuthModeWidget {
pub error: Option<String>,
pub sign_in_state: SignInState,
pub codex_home: PathBuf,
pub login_status: LoginStatus,
pub preferred_auth_method: AuthMode,
}

impl AuthModeWidget {
@@ -118,6 +121,24 @@ impl AuthModeWidget {
Line::from(""),
];

// If the user is already authenticated but the method differs from their
// preferred auth method, show a brief explanation.
if let LoginStatus::AuthMode(current) = self.login_status {
if current != self.preferred_auth_method {
let to_label = |mode: AuthMode| match mode {
AuthMode::ApiKey => "API key",
AuthMode::ChatGPT => "ChatGPT",
};
let msg = format!(
" You’re currently using {} while your preferred method is {}.",
to_label(current),
to_label(self.preferred_auth_method)
);
lines.push(Line::from(msg).style(Style::default()));
lines.push(Line::from(""));
}
}

let create_mode_item = |idx: usize,
selected_mode: AuthMode,
text: &str,
@@ -146,17 +167,29 @@ impl AuthModeWidget {

vec![line1, line2]
};
let chatgpt_label = if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ChatGPT))
{
"Continue using ChatGPT"
} else {
"Sign in with ChatGPT"
};

lines.extend(create_mode_item(
0,
AuthMode::ChatGPT,
"Sign in with ChatGPT",
chatgpt_label,
"Usage included with Plus, Pro, and Team plans",
));
let api_key_label = if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ApiKey))
{
"Continue using API key"
} else {
"Provide your own API key"
};
lines.extend(create_mode_item(
1,
AuthMode::ApiKey,
"Provide your own API key",
api_key_label,
"Pay for what you use",
));
lines.push(Line::from(""));
@@ -279,6 +312,14 @@ impl AuthModeWidget {
}

fn start_chatgpt_login(&mut self) {
// If we're already authenticated with ChatGPT, don't start a new login –
// just proceed to the success message flow.
if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ChatGPT)) {
self.sign_in_state = SignInState::ChatGptSuccess;
self.event_tx.send(AppEvent::RequestRedraw);
return;
}

self.error = None;
let opts = ServerOptions::new(self.codex_home.clone(), CLIENT_ID.to_string());
let server = run_login_server(opts);
@@ -309,11 +350,14 @@ impl AuthModeWidget {

/// TODO: Read/write from the correct hierarchy config overrides + auth json + OPENAI_API_KEY.
fn verify_api_key(&mut self) {
if std::env::var("OPENAI_API_KEY").is_err() {
self.sign_in_state = SignInState::EnvVarMissing;
} else {
if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ApiKey)) {
// We already have an API key configured (e.g., from auth.json or env),
// so mark this step complete immediately.
self.sign_in_state = SignInState::EnvVarFound;
} else {
self.sign_in_state = SignInState::EnvVarMissing;
}

self.event_tx.send(AppEvent::RequestRedraw);
}
}

@@ -8,6 +8,7 @@ use ratatui::widgets::WidgetRef;

use codex_login::AuthMode;

use crate::LoginStatus;
use crate::app::ChatWidgetArgs;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
@@ -53,8 +54,9 @@ pub(crate) struct OnboardingScreenArgs {
pub chat_widget_args: ChatWidgetArgs,
pub codex_home: PathBuf,
pub cwd: PathBuf,
pub show_login_screen: bool,
pub show_trust_screen: bool,
pub show_login_screen: bool,
pub login_status: LoginStatus,
}

impl OnboardingScreen {
@@ -64,11 +66,12 @@ impl OnboardingScreen {
chat_widget_args,
codex_home,
cwd,
show_login_screen,
show_trust_screen,
show_login_screen,
login_status,
} = args;
let mut steps: Vec<Step> = vec![Step::Welcome(WelcomeWidget {
is_logged_in: !show_login_screen,
is_logged_in: !matches!(login_status, LoginStatus::NotAuthenticated),
})];
if show_login_screen {
steps.push(Step::Auth(AuthModeWidget {
@@ -77,6 +80,8 @@ impl OnboardingScreen {
error: None,
sign_in_state: SignInState::PickMode,
codex_home: codex_home.clone(),
login_status,
preferred_auth_method: chat_widget_args.config.preferred_auth_method,
}))
}
let is_git_repo = is_inside_git_repo(&cwd);