Show login options when not signed in with ChatGPT (#2440)
Motivation: some users end up using their API key even though they want to use their ChatGPT account. We want to give them the chance to always log in with their account. This PR displays login options when the user is not signed in with ChatGPT: even if you have set an OpenAI API key as an environment variable, you will still be prompted to log in with ChatGPT. We've also added a new flag, `always_use_api_key_signing` (false by default), which ensures you are never asked to log in with ChatGPT and always defaults to using your API key.

https://github.com/user-attachments/assets/b61ebfa9-3c5e-4ab7-bf94-395c23a0e0af

After ChatGPT sign in: https://github.com/user-attachments/assets/d58b366b-c46a-428f-a22f-2ac230f991c0
@@ -30,7 +30,8 @@ mod token_data;
 pub const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";
 pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
 
-#[derive(Clone, Debug, PartialEq, Copy)]
+#[derive(Clone, Debug, PartialEq, Copy, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
 pub enum AuthMode {
     ApiKey,
     ChatGPT,
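With `Serialize`/`Deserialize` now derived and `rename_all = "lowercase"` applied, `AuthMode` round-trips as the lowercase strings "apikey" and "chatgpt", which is what any serialized form of the enum will contain. A minimal sketch of that round-trip; the `serde_json` usage here is illustrative and not part of this change:

// Illustrative only: shows how the lowercase rename affects the wire format.
// Assumes serde (with the derive feature) and serde_json are available.
use serde::{Deserialize, Serialize};

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AuthMode {
    ApiKey,
    ChatGPT,
}

fn main() {
    // Serializes to the lowercase variant names.
    assert_eq!(serde_json::to_string(&AuthMode::ApiKey).unwrap(), "\"apikey\"");
    assert_eq!(serde_json::to_string(&AuthMode::ChatGPT).unwrap(), "\"chatgpt\"");

    // And parses back the same way.
    let parsed: AuthMode = serde_json::from_str("\"chatgpt\"").unwrap();
    assert_eq!(parsed, AuthMode::ChatGPT);
}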
@@ -63,8 +64,11 @@ impl CodexAuth {
     /// Loads the available auth information from the auth.json or
     /// OPENAI_API_KEY environment variable.
-    pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
-        load_auth(codex_home, true)
+    pub fn from_codex_home(
+        codex_home: &Path,
+        preferred_auth_method: AuthMode,
+    ) -> std::io::Result<Option<CodexAuth>> {
+        load_auth(codex_home, true, preferred_auth_method)
     }
 
     pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
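Callers of `from_codex_home` now pass the preferred auth method alongside the Codex home directory. A minimal sketch of the new call shape; the import paths and the surrounding function are assumptions, only the signature comes from this diff:

// Sketch of a call site. Import paths are assumed; the diff only shows the
// core crate internals.
use std::path::Path;

use codex_core::auth::AuthMode;
use codex_core::auth::CodexAuth;

fn resolve_auth(codex_home: &Path) -> std::io::Result<()> {
    // Prefer ChatGPT sign-in; an API key is still used when it is all we have.
    match CodexAuth::from_codex_home(codex_home, AuthMode::ChatGPT)? {
        Some(_auth) => {
            // Credentials were found in auth.json or via OPENAI_API_KEY.
        }
        None => {
            // Nothing to sign in with; a front end would show the login options.
        }
    }
    Ok(())
}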
@@ -165,7 +169,11 @@ impl CodexAuth {
     }
 }
 
-fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option<CodexAuth>> {
+fn load_auth(
+    codex_home: &Path,
+    include_env_var: bool,
+    preferred_auth_method: AuthMode,
+) -> std::io::Result<Option<CodexAuth>> {
     // First, check to see if there is a valid auth.json file. If not, we fall
     // back to AuthMode::ApiKey using the OPENAI_API_KEY environment variable
     // (if it is set).
@@ -201,7 +209,7 @@ fn load_auth(codex_home: &Path, include_env_var: bool) -> std::io::Result<Option
     // "refreshable" even if we are using the API key for auth?
     match &tokens {
         Some(tokens) => {
-            if tokens.is_plan_that_should_use_api_key() {
+            if tokens.should_use_api_key(preferred_auth_method) {
                 return Ok(Some(CodexAuth::from_api_key(api_key)));
             } else {
                 // Ignore the API key and fall through to ChatGPT auth.
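Read together with the comment above, the resolution order in `load_auth` is roughly: a valid auth.json is consulted first; when it carries both tokens and an API key, `tokens.should_use_api_key(preferred_auth_method)` decides whether to return API-key auth or fall through to ChatGPT auth; and only when there is no usable auth.json does the `OPENAI_API_KEY` environment variable come into play (and only if `include_env_var` is set). This summary is inferred from the comments and tests in this diff rather than from the full function body.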
@@ -383,7 +391,9 @@ mod tests {
     fn writes_api_key_and_loads_auth() {
         let dir = tempdir().unwrap();
         login_with_api_key(dir.path(), "sk-test-key").unwrap();
-        let auth = load_auth(dir.path(), false).unwrap().unwrap();
+        let auth = load_auth(dir.path(), false, AuthMode::ChatGPT)
+            .unwrap()
+            .unwrap();
         assert_eq!(auth.mode, AuthMode::ApiKey);
         assert_eq!(auth.api_key.as_deref(), Some("sk-test-key"));
     }
@@ -395,7 +405,9 @@ mod tests {
         let env_var = std::env::var(OPENAI_API_KEY_ENV_VAR);
 
         if let Ok(env_var) = env_var {
-            let auth = load_auth(dir.path(), true).unwrap().unwrap();
+            let auth = load_auth(dir.path(), true, AuthMode::ChatGPT)
+                .unwrap()
+                .unwrap();
             assert_eq!(auth.mode, AuthMode::ApiKey);
             assert_eq!(auth.api_key, Some(env_var));
         }
@@ -438,7 +450,9 @@ mod tests {
             mode,
             auth_dot_json,
             auth_file: _,
-        } = load_auth(codex_home.path(), false).unwrap().unwrap();
+        } = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
+            .unwrap()
+            .unwrap();
         assert_eq!(None, api_key);
         assert_eq!(AuthMode::ChatGPT, mode);
 
@@ -487,7 +501,9 @@ mod tests {
             mode,
             auth_dot_json,
             auth_file: _,
-        } = load_auth(codex_home.path(), false).unwrap().unwrap();
+        } = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
+            .unwrap()
+            .unwrap();
         assert_eq!(None, api_key);
         assert_eq!(AuthMode::ChatGPT, mode);
 
@@ -535,7 +551,9 @@ mod tests {
            mode,
            auth_dot_json,
            auth_file: _,
-        } = load_auth(codex_home.path(), false).unwrap().unwrap();
+        } = load_auth(codex_home.path(), false, AuthMode::ChatGPT)
+            .unwrap()
+            .unwrap();
         assert_eq!(Some("sk-test-key".to_string()), api_key);
         assert_eq!(AuthMode::ApiKey, mode);
 
@@ -624,7 +642,9 @@ mod tests {
         )
         .unwrap();
 
-        let auth = load_auth(dir.path(), false).unwrap().unwrap();
+        let auth = load_auth(dir.path(), false, AuthMode::ChatGPT)
+            .unwrap()
+            .unwrap();
         assert_eq!(auth.mode, AuthMode::ApiKey);
         assert_eq!(auth.api_key, Some("sk-test-key".to_string()));
 
@@ -3,6 +3,8 @@ use serde::Deserialize;
 use serde::Serialize;
 use thiserror::Error;
 
+use crate::AuthMode;
+
 #[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
 pub struct TokenData {
     /// Flat info parsed from the JWT in auth.json.
@@ -23,7 +25,11 @@ pub struct TokenData {
 impl TokenData {
     /// Returns true if this is a plan that should use the traditional
     /// "metered" billing via an API key.
-    pub(crate) fn is_plan_that_should_use_api_key(&self) -> bool {
+    pub(crate) fn should_use_api_key(&self, preferred_auth_method: AuthMode) -> bool {
+        if preferred_auth_method == AuthMode::ApiKey {
+            return true;
+        }
+
         self.id_token
             .chatgpt_plan_type
             .as_ref()
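The renamed method keeps the old plan-based check as a fallback: an explicit `AuthMode::ApiKey` preference short-circuits to `true`, and only otherwise does `chatgpt_plan_type` matter. A simplified, self-contained sketch of that precedence; the `plan_is_metered` helper, the plan names, and the `unwrap_or(false)` default are stand-ins, since the real predicate continues past the end of this hunk:

// Simplified illustration of the new precedence: an explicit API-key
// preference overrides whatever the plan type would normally imply.
// The real TokenData/IdToken types carry more fields.

#[derive(Clone, Copy, PartialEq, Eq)]
enum AuthMode {
    ApiKey,
    ChatGPT,
}

struct TokenData {
    chatgpt_plan_type: Option<String>,
}

impl TokenData {
    fn should_use_api_key(&self, preferred_auth_method: AuthMode) -> bool {
        if preferred_auth_method == AuthMode::ApiKey {
            return true;
        }
        // Stand-in for the original plan-based check.
        self.chatgpt_plan_type
            .as_deref()
            .map(plan_is_metered)
            .unwrap_or(false)
    }
}

// Hypothetical helper: treat anything that is not a ChatGPT subscription as metered.
fn plan_is_metered(plan: &str) -> bool {
    !matches!(plan, "plus" | "pro" | "team" | "enterprise")
}

fn main() {
    let tokens = TokenData { chatgpt_plan_type: Some("plus".to_string()) };
    // A subscription plan normally stays on ChatGPT auth...
    assert!(!tokens.should_use_api_key(AuthMode::ChatGPT));
    // ...but an explicit API-key preference now forces the API key anyway.
    assert!(tokens.should_use_api_key(AuthMode::ApiKey));
}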