Simplify auth flow and reconcile differences between ChatGPT and API Key auth (#3189)
This PR does the following:

* Adds the ability to paste or type an API key.
* Removes the `preferred_auth_method` config option. The last login method is always persisted in auth.json, so this option isn't needed.
* If the `OPENAI_API_KEY` env variable is defined, its value is used to prepopulate the new UI. The env variable is otherwise ignored by the CLI.
* Adds a new MCP server entry point, "login_api_key", so we can implement this same API key behavior for the VS Code extension.

<img width="473" height="140" alt="Screenshot 2025-09-04 at 3 51 04 PM" src="https://github.com/user-attachments/assets/c11bbd5b-8a4d-4d71-90fd-34130460f9d9" />
<img width="726" height="254" alt="Screenshot 2025-09-04 at 3 51 32 PM" src="https://github.com/user-attachments/assets/6cc76b34-309a-4387-acbc-15ee5c756db9" />
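For reference, a rough sketch of the wire-level request the new entry point accepts, inferred from the `LoginApiKeyParams` struct and the `loginApiKey` test helper in this diff (the `id` and the key value are placeholders):

```json
{
  "jsonrpc": "2.0",
  "id": 1,
  "method": "loginApiKey",
  "params": { "apiKey": "sk-example" }
}
```

On success the server replies with an empty `LoginApiKeyResponse` and emits an `AuthStatusChange` notification reflecting the new auth mode.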
codex-rs/Cargo.lock (generated)
@@ -561,7 +561,6 @@ dependencies = [
 "clap",
 "codex-common",
 "codex-core",
-"codex-protocol",
 "serde",
 "serde_json",
 "tempfile",
@@ -11,7 +11,6 @@ anyhow = "1"
 clap = { version = "4", features = ["derive"] }
 codex-common = { path = "../common", features = ["cli"] }
 codex-core = { path = "../core" }
-codex-protocol = { path = "../protocol" }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 tokio = { version = "1", features = ["full"] }
@@ -1,5 +1,4 @@
 use codex_core::CodexAuth;
-use codex_protocol::mcp_protocol::AuthMode;
 use std::path::Path;
 use std::sync::LazyLock;
 use std::sync::RwLock;
@@ -20,7 +19,7 @@ pub fn set_chatgpt_token_data(value: TokenData) {
 
 /// Initialize the ChatGPT token from auth.json file
 pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
-let auth = CodexAuth::from_codex_home(codex_home, AuthMode::ChatGPT)?;
+let auth = CodexAuth::from_codex_home(codex_home)?;
 if let Some(auth) = auth {
 let token_data = auth.get_token_data().await?;
 set_chatgpt_token_data(token_data);
@@ -1,7 +1,6 @@
 use codex_common::CliConfigOverrides;
 use codex_core::CodexAuth;
 use codex_core::auth::CLIENT_ID;
-use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
 use codex_core::auth::login_with_api_key;
 use codex_core::auth::logout;
 use codex_core::config::Config;
@@ -9,7 +8,6 @@ use codex_core::config::ConfigOverrides;
 use codex_login::ServerOptions;
 use codex_login::run_login_server;
 use codex_protocol::mcp_protocol::AuthMode;
-use std::env;
 use std::path::PathBuf;
 
 pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
@@ -60,19 +58,11 @@ pub async fn run_login_with_api_key(
 pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
 let config = load_config_or_exit(cli_config_overrides);
 
-match CodexAuth::from_codex_home(&config.codex_home, config.preferred_auth_method) {
+match CodexAuth::from_codex_home(&config.codex_home) {
 Ok(Some(auth)) => match auth.mode {
 AuthMode::ApiKey => match auth.get_token().await {
 Ok(api_key) => {
 eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));
-
-if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR)
-&& env_api_key == api_key
-{
-eprintln!(
-" API loaded from OPENAI_API_KEY environment variable or .env file"
-);
-}
 std::process::exit(0);
 }
 Err(e) => {
@@ -37,10 +37,8 @@ pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
 
 let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
 // Use conversation_manager API to start a conversation
-let conversation_manager = ConversationManager::new(AuthManager::shared(
-config.codex_home.clone(),
-config.preferred_auth_method,
-));
+let conversation_manager =
+ConversationManager::new(AuthManager::shared(config.codex_home.clone()));
 let NewConversation {
 conversation_id: _,
 conversation,
@@ -70,13 +70,9 @@ impl CodexAuth {
 Ok(access)
 }
 
-/// Loads the available auth information from the auth.json or
-/// OPENAI_API_KEY environment variable.
-pub fn from_codex_home(
-codex_home: &Path,
-preferred_auth_method: AuthMode,
-) -> std::io::Result<Option<CodexAuth>> {
-load_auth(codex_home, true, preferred_auth_method)
+/// Loads the available auth information from the auth.json.
+pub fn from_codex_home(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
+load_auth(codex_home)
 }
 
 pub async fn get_token_data(&self) -> Result<TokenData, std::io::Error> {
@@ -193,10 +189,11 @@ impl CodexAuth {
 
 pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
 
-fn read_openai_api_key_from_env() -> Option<String> {
+pub fn read_openai_api_key_from_env() -> Option<String> {
 env::var(OPENAI_API_KEY_ENV_VAR)
 .ok()
-.filter(|s| !s.is_empty())
+.map(|value| value.trim().to_string())
+.filter(|value| !value.is_empty())
 }
 
 pub fn get_auth_file(codex_home: &Path) -> PathBuf {
@@ -214,7 +211,7 @@ pub fn logout(codex_home: &Path) -> std::io::Result<bool> {
 }
 }
 
-/// Writes an `auth.json` that contains only the API key. Intended for CLI use.
+/// Writes an `auth.json` that contains only the API key.
 pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<()> {
 let auth_dot_json = AuthDotJson {
 openai_api_key: Some(api_key.to_string()),
@@ -224,28 +221,11 @@ pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<(
 write_auth_json(&get_auth_file(codex_home), &auth_dot_json)
 }
 
-fn load_auth(
-codex_home: &Path,
-include_env_var: bool,
-preferred_auth_method: AuthMode,
-) -> std::io::Result<Option<CodexAuth>> {
-// First, check to see if there is a valid auth.json file. If not, we fall
-// back to AuthMode::ApiKey using the OPENAI_API_KEY environment variable
-// (if it is set).
+fn load_auth(codex_home: &Path) -> std::io::Result<Option<CodexAuth>> {
 let auth_file = get_auth_file(codex_home);
 let client = crate::default_client::create_client();
 let auth_dot_json = match try_read_auth_json(&auth_file) {
 Ok(auth) => auth,
-// If auth.json does not exist, try to read the OPENAI_API_KEY from the
-// environment variable.
-Err(e) if e.kind() == std::io::ErrorKind::NotFound && include_env_var => {
-return match read_openai_api_key_from_env() {
-Some(api_key) => Ok(Some(CodexAuth::from_api_key_with_client(&api_key, client))),
-None => Ok(None),
-};
-}
-// Though if auth.json exists but is malformed, do not fall back to the
-// env var because the user may be expecting to use AuthMode::ChatGPT.
 Err(e) => {
 return Err(e);
 }
@@ -257,32 +237,11 @@ fn load_auth(
 last_refresh,
 } = auth_dot_json;
 
-// If the auth.json has an API key AND does not appear to be on a plan that
-// should prefer AuthMode::ChatGPT, use AuthMode::ApiKey.
+// Prefer AuthMode.ApiKey if it's set in the auth.json.
 if let Some(api_key) = &auth_json_api_key {
-// Should any of these be AuthMode::ChatGPT with the api_key set?
-// Does AuthMode::ChatGPT indicate that there is an auth.json that is
-// "refreshable" even if we are using the API key for auth?
-match &tokens {
-Some(tokens) => {
-if tokens.should_use_api_key(preferred_auth_method, tokens.is_openai_email()) {
-return Ok(Some(CodexAuth::from_api_key_with_client(api_key, client)));
-} else {
-// Ignore the API key and fall through to ChatGPT auth.
-}
-}
-None => {
-// We have an API key but no tokens in the auth.json file.
-// Perhaps the user ran `codex login --api-key <KEY>` or updated
-// auth.json by hand. Either way, let's assume they are trying
-// to use their API key.
-return Ok(Some(CodexAuth::from_api_key_with_client(api_key, client)));
-}
-}
+return Ok(Some(CodexAuth::from_api_key_with_client(api_key, client)));
 }
 
-// For the AuthMode::ChatGPT variant, perhaps neither api_key nor
-// openai_api_key should exist?
 Ok(Some(CodexAuth {
 api_key: None,
 mode: AuthMode::ChatGPT,
@@ -412,7 +371,6 @@ use std::sync::RwLock;
 /// Internal cached auth state.
 #[derive(Clone, Debug)]
 struct CachedAuth {
-preferred_auth_mode: AuthMode,
 auth: Option<CodexAuth>,
 }
 
@@ -468,9 +426,7 @@ mod tests {
 auth_dot_json,
 auth_file: _,
 ..
-} = super::load_auth(codex_home.path(), false, AuthMode::ChatGPT)
-.unwrap()
-.unwrap();
+} = super::load_auth(codex_home.path()).unwrap().unwrap();
 assert_eq!(None, api_key);
 assert_eq!(AuthMode::ChatGPT, mode);
 
@@ -499,88 +455,6 @@ mod tests {
 )
 }
 
-/// Even if the OPENAI_API_KEY is set in auth.json, if the plan is not in
-/// [`TokenData::is_plan_that_should_use_api_key`], it should use
-/// [`AuthMode::ChatGPT`].
-#[tokio::test]
-async fn pro_account_with_api_key_still_uses_chatgpt_auth() {
-let codex_home = tempdir().unwrap();
-let fake_jwt = write_auth_file(
-AuthFileParams {
-openai_api_key: Some("sk-test-key".to_string()),
-chatgpt_plan_type: "pro".to_string(),
-},
-codex_home.path(),
-)
-.expect("failed to write auth file");
-
-let CodexAuth {
-api_key,
-mode,
-auth_dot_json,
-auth_file: _,
-..
-} = super::load_auth(codex_home.path(), false, AuthMode::ChatGPT)
-.unwrap()
-.unwrap();
-assert_eq!(None, api_key);
-assert_eq!(AuthMode::ChatGPT, mode);
-
-let guard = auth_dot_json.lock().unwrap();
-let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
-assert_eq!(
-&AuthDotJson {
-openai_api_key: None,
-tokens: Some(TokenData {
-id_token: IdTokenInfo {
-email: Some("user@example.com".to_string()),
-chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
-raw_jwt: fake_jwt,
-},
-access_token: "test-access-token".to_string(),
-refresh_token: "test-refresh-token".to_string(),
-account_id: None,
-}),
-last_refresh: Some(
-DateTime::parse_from_rfc3339(LAST_REFRESH)
-.unwrap()
-.with_timezone(&Utc)
-),
-},
-auth_dot_json
-)
-}
-
-/// If the OPENAI_API_KEY is set in auth.json and it is an enterprise
-/// account, then it should use [`AuthMode::ApiKey`].
-#[tokio::test]
-async fn enterprise_account_with_api_key_uses_apikey_auth() {
-let codex_home = tempdir().unwrap();
-write_auth_file(
-AuthFileParams {
-openai_api_key: Some("sk-test-key".to_string()),
-chatgpt_plan_type: "enterprise".to_string(),
-},
-codex_home.path(),
-)
-.expect("failed to write auth file");
-
-let CodexAuth {
-api_key,
-mode,
-auth_dot_json,
-auth_file: _,
-..
-} = super::load_auth(codex_home.path(), false, AuthMode::ChatGPT)
-.unwrap()
-.unwrap();
-assert_eq!(Some("sk-test-key".to_string()), api_key);
-assert_eq!(AuthMode::ApiKey, mode);
-
-let guard = auth_dot_json.lock().expect("should unwrap");
-assert!(guard.is_none(), "auth_dot_json should be None");
-}
-
 #[tokio::test]
 async fn loads_api_key_from_auth_json() {
 let dir = tempdir().unwrap();
@@ -591,9 +465,7 @@ mod tests {
 )
 .unwrap();
 
-let auth = super::load_auth(dir.path(), false, AuthMode::ChatGPT)
-.unwrap()
-.unwrap();
+let auth = super::load_auth(dir.path()).unwrap().unwrap();
 assert_eq!(auth.mode, AuthMode::ApiKey);
 assert_eq!(auth.api_key, Some("sk-test-key".to_string()));
 
@@ -683,26 +555,17 @@ impl AuthManager {
 /// preferred auth method. Errors loading auth are swallowed; `auth()` will
 /// simply return `None` in that case so callers can treat it as an
 /// unauthenticated state.
-pub fn new(codex_home: PathBuf, preferred_auth_mode: AuthMode) -> Self {
-let auth = CodexAuth::from_codex_home(&codex_home, preferred_auth_mode)
-.ok()
-.flatten();
+pub fn new(codex_home: PathBuf) -> Self {
+let auth = CodexAuth::from_codex_home(&codex_home).ok().flatten();
 Self {
 codex_home,
-inner: RwLock::new(CachedAuth {
-preferred_auth_mode,
-auth,
-}),
+inner: RwLock::new(CachedAuth { auth }),
 }
 }
 
 /// Create an AuthManager with a specific CodexAuth, for testing only.
 pub fn from_auth_for_testing(auth: CodexAuth) -> Arc<Self> {
-let preferred_auth_mode = auth.mode;
-let cached = CachedAuth {
-preferred_auth_mode,
-auth: Some(auth),
-};
+let cached = CachedAuth { auth: Some(auth) };
 Arc::new(Self {
 codex_home: PathBuf::new(),
 inner: RwLock::new(cached),
@@ -714,21 +577,10 @@ impl AuthManager {
 self.inner.read().ok().and_then(|c| c.auth.clone())
 }
 
-/// Preferred auth method used when (re)loading.
-pub fn preferred_auth_method(&self) -> AuthMode {
-self.inner
-.read()
-.map(|c| c.preferred_auth_mode)
-.unwrap_or(AuthMode::ApiKey)
-}
-
-/// Force a reload using the existing preferred auth method. Returns
+/// Force a reload of the auth information from auth.json. Returns
 /// whether the auth value changed.
 pub fn reload(&self) -> bool {
-let preferred = self.preferred_auth_method();
-let new_auth = CodexAuth::from_codex_home(&self.codex_home, preferred)
-.ok()
-.flatten();
+let new_auth = CodexAuth::from_codex_home(&self.codex_home).ok().flatten();
 if let Ok(mut guard) = self.inner.write() {
 let changed = !AuthManager::auths_equal(&guard.auth, &new_auth);
 guard.auth = new_auth;
@@ -747,8 +599,8 @@ impl AuthManager {
 }
 
 /// Convenience constructor returning an `Arc` wrapper.
-pub fn shared(codex_home: PathBuf, preferred_auth_mode: AuthMode) -> Arc<Self> {
-Arc::new(Self::new(codex_home, preferred_auth_mode))
+pub fn shared(codex_home: PathBuf) -> Arc<Self> {
+Arc::new(Self::new(codex_home))
 }
 
 /// Attempt to refresh the current auth token (if any). On success, reload
@@ -19,7 +19,6 @@ use codex_protocol::config_types::ReasoningEffort;
 use codex_protocol::config_types::ReasoningSummary;
 use codex_protocol::config_types::SandboxMode;
 use codex_protocol::config_types::Verbosity;
-use codex_protocol::mcp_protocol::AuthMode;
 use codex_protocol::mcp_protocol::Tools;
 use codex_protocol::mcp_protocol::UserSavedConfig;
 use dirs::home_dir;
@@ -167,9 +166,6 @@ pub struct Config {
 
 pub tools_web_search_request: bool,
 
-/// If set to `true`, the API key will be signed with the `originator` header.
-pub preferred_auth_method: AuthMode,
-
 pub use_experimental_streamable_shell_tool: bool,
 
 /// If set to `true`, used only the experimental unified exec tool.
@@ -494,9 +490,6 @@ pub struct ConfigToml {
 
 pub projects: Option<HashMap<String, ProjectConfig>>,
 
-/// If set to `true`, the API key will be signed with the `originator` header.
-pub preferred_auth_method: Option<AuthMode>,
-
 /// Nested tools section for feature toggles
 pub tools: Option<ToolsToml>,
 
@@ -837,7 +830,6 @@ impl Config {
 include_plan_tool: include_plan_tool.unwrap_or(false),
 include_apply_patch_tool: include_apply_patch_tool.unwrap_or(false),
 tools_web_search_request,
-preferred_auth_method: cfg.preferred_auth_method.unwrap_or(AuthMode::ChatGPT),
 use_experimental_streamable_shell_tool: cfg
 .experimental_use_exec_command_tool
 .unwrap_or(false),
@@ -1217,7 +1209,6 @@ model_verbosity = "high"
 include_plan_tool: false,
 include_apply_patch_tool: false,
 tools_web_search_request: false,
-preferred_auth_method: AuthMode::ChatGPT,
 use_experimental_streamable_shell_tool: false,
 use_experimental_unified_exec_tool: true,
 include_view_image_tool: true,
@@ -1275,7 +1266,6 @@ model_verbosity = "high"
 include_plan_tool: false,
 include_apply_patch_tool: false,
 tools_web_search_request: false,
-preferred_auth_method: AuthMode::ChatGPT,
 use_experimental_streamable_shell_tool: false,
 use_experimental_unified_exec_tool: true,
 include_view_image_tool: true,
@@ -1348,7 +1338,6 @@ model_verbosity = "high"
 include_plan_tool: false,
 include_apply_patch_tool: false,
 tools_web_search_request: false,
-preferred_auth_method: AuthMode::ChatGPT,
 use_experimental_streamable_shell_tool: false,
 use_experimental_unified_exec_tool: true,
 include_view_image_tool: true,
@@ -1407,7 +1396,6 @@ model_verbosity = "high"
 include_plan_tool: false,
 include_apply_patch_tool: false,
 tools_web_search_request: false,
-preferred_auth_method: AuthMode::ChatGPT,
 use_experimental_streamable_shell_tool: false,
 use_experimental_unified_exec_tool: true,
 include_view_image_tool: true,
@@ -80,7 +80,10 @@ pub struct ModelProviderInfo {
 /// the connection as lost.
 pub stream_idle_timeout_ms: Option<u64>,
 
-/// Whether this provider requires some form of standard authentication (API key, ChatGPT token).
+/// Does this provider require an OpenAI API Key or ChatGPT login token? If true,
+/// user is presented with login screen on first run, and login preference and token/key
+/// are stored in auth.json. If false (which is the default), login screen is skipped,
+/// and API key (if needed) comes from the "env_key" environment variable.
 #[serde(default)]
 pub requires_openai_auth: bool,
 }
@@ -3,8 +3,6 @@ use serde::Deserialize;
 use serde::Serialize;
 use thiserror::Error;
 
-use codex_protocol::mcp_protocol::AuthMode;
-
 #[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
 pub struct TokenData {
 /// Flat info parsed from the JWT in auth.json.
@@ -22,36 +20,6 @@ pub struct TokenData {
 pub account_id: Option<String>,
 }
 
-impl TokenData {
-/// Returns true if this is a plan that should use the traditional
-/// "metered" billing via an API key.
-pub(crate) fn should_use_api_key(
-&self,
-preferred_auth_method: AuthMode,
-is_openai_email: bool,
-) -> bool {
-if preferred_auth_method == AuthMode::ApiKey {
-return true;
-}
-// If the email is an OpenAI email, use AuthMode::ChatGPT unless preferred_auth_method is AuthMode::ApiKey.
-if is_openai_email {
-return false;
-}
-
-self.id_token
-.chatgpt_plan_type
-.as_ref()
-.is_none_or(|plan| plan.is_plan_that_should_use_api_key())
-}
-
-pub fn is_openai_email(&self) -> bool {
-self.id_token
-.email
-.as_deref()
-.is_some_and(|email| email.trim().to_ascii_lowercase().ends_with("@openai.com"))
-}
-}
-
 /// Flat subset of useful claims in id_token from auth.json.
 #[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
 pub struct IdTokenInfo {
@@ -80,19 +48,6 @@ pub(crate) enum PlanType {
 }
 
 impl PlanType {
-fn is_plan_that_should_use_api_key(&self) -> bool {
-match self {
-Self::Known(known) => {
-use KnownPlan::*;
-!matches!(known, Free | Plus | Pro | Team)
-}
-Self::Unknown(_) => {
-// Unknown plans should use the API key.
-true
-}
-}
-}
-
 pub fn as_string(&self) -> String {
 match self {
 Self::Known(known) => format!("{known:?}").to_lowercase(),
@@ -8,7 +8,6 @@ use codex_core::protocol::EventMsg;
 use codex_core::protocol::InputItem;
 use codex_core::protocol::Op;
 use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
-use codex_protocol::mcp_protocol::AuthMode;
 use core_test_support::load_default_config_for_test;
 use core_test_support::load_sse_fixture_with_id;
 use core_test_support::wait_for_event;
@@ -489,79 +488,6 @@ async fn chatgpt_auth_sends_correct_request() {
 );
 }
 
-#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn prefers_chatgpt_token_when_config_prefers_chatgpt() {
-if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
-println!(
-"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
-);
-return;
-}
-
-// Mock server
-let server = MockServer::start().await;
-
-let first = ResponseTemplate::new(200)
-.insert_header("content-type", "text/event-stream")
-.set_body_raw(sse_completed("resp1"), "text/event-stream");
-
-// Expect ChatGPT base path and correct headers
-Mock::given(method("POST"))
-.and(path("/v1/responses"))
-.and(header_regex("Authorization", r"Bearer Access-123"))
-.and(header_regex("chatgpt-account-id", r"acc-123"))
-.respond_with(first)
-.expect(1)
-.mount(&server)
-.await;
-
-let model_provider = ModelProviderInfo {
-base_url: Some(format!("{}/v1", server.uri())),
-..built_in_model_providers()["openai"].clone()
-};
-
-// Init session
-let codex_home = TempDir::new().unwrap();
-// Write auth.json that contains both API key and ChatGPT tokens for a plan that should prefer ChatGPT.
-let _jwt = write_auth_json(
-&codex_home,
-Some("sk-test-key"),
-"pro",
-"Access-123",
-Some("acc-123"),
-);
-
-let mut config = load_default_config_for_test(&codex_home);
-config.model_provider = model_provider;
-config.preferred_auth_method = AuthMode::ChatGPT;
-
-let auth_manager =
-match CodexAuth::from_codex_home(codex_home.path(), config.preferred_auth_method) {
-Ok(Some(auth)) => codex_core::AuthManager::from_auth_for_testing(auth),
-Ok(None) => panic!("No CodexAuth found in codex_home"),
-Err(e) => panic!("Failed to load CodexAuth: {e}"),
-};
-let conversation_manager = ConversationManager::new(auth_manager);
-let NewConversation {
-conversation: codex,
-..
-} = conversation_manager
-.new_conversation(config)
-.await
-.expect("create new conversation");
-
-codex
-.submit(Op::UserInput {
-items: vec![InputItem::Text {
-text: "hello".into(),
-}],
-})
-.await
-.unwrap();
-
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
-}
-
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
 if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
@@ -606,14 +532,12 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
 
 let mut config = load_default_config_for_test(&codex_home);
 config.model_provider = model_provider;
-config.preferred_auth_method = AuthMode::ApiKey;
 
-let auth_manager =
-match CodexAuth::from_codex_home(codex_home.path(), config.preferred_auth_method) {
-Ok(Some(auth)) => codex_core::AuthManager::from_auth_for_testing(auth),
-Ok(None) => panic!("No CodexAuth found in codex_home"),
-Err(e) => panic!("Failed to load CodexAuth: {e}"),
-};
+let auth_manager = match CodexAuth::from_codex_home(codex_home.path()) {
+Ok(Some(auth)) => codex_core::AuthManager::from_auth_for_testing(auth),
+Ok(None) => panic!("No CodexAuth found in codex_home"),
+Err(e) => panic!("Failed to load CodexAuth: {e}"),
+};
 let conversation_manager = ConversationManager::new(auth_manager);
 let NewConversation {
 conversation: codex,
@@ -187,10 +187,8 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
 std::process::exit(1);
 }
 
-let conversation_manager = ConversationManager::new(AuthManager::shared(
-config.codex_home.clone(),
-config.preferred_auth_method,
-));
+let conversation_manager =
+ConversationManager::new(AuthManager::shared(config.codex_home.clone()));
 let NewConversation {
 conversation_id: _,
 conversation,
@@ -12,6 +12,7 @@ use codex_core::RolloutRecorder;
 use codex_core::SessionMeta;
 use codex_core::auth::CLIENT_ID;
 use codex_core::auth::get_auth_file;
+use codex_core::auth::login_with_api_key;
 use codex_core::auth::try_read_auth_json;
 use codex_core::config::Config;
 use codex_core::config::ConfigOverrides;
@@ -39,7 +40,6 @@ use codex_protocol::mcp_protocol::ApplyPatchApprovalParams;
 use codex_protocol::mcp_protocol::ApplyPatchApprovalResponse;
 use codex_protocol::mcp_protocol::ArchiveConversationParams;
 use codex_protocol::mcp_protocol::ArchiveConversationResponse;
-use codex_protocol::mcp_protocol::AuthMode;
 use codex_protocol::mcp_protocol::AuthStatusChangeNotification;
 use codex_protocol::mcp_protocol::ClientRequest;
 use codex_protocol::mcp_protocol::ConversationId;
@@ -57,6 +57,8 @@ use codex_protocol::mcp_protocol::InterruptConversationParams;
 use codex_protocol::mcp_protocol::InterruptConversationResponse;
 use codex_protocol::mcp_protocol::ListConversationsParams;
 use codex_protocol::mcp_protocol::ListConversationsResponse;
+use codex_protocol::mcp_protocol::LoginApiKeyParams;
+use codex_protocol::mcp_protocol::LoginApiKeyResponse;
 use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification;
 use codex_protocol::mcp_protocol::LoginChatGptResponse;
 use codex_protocol::mcp_protocol::NewConversationParams;
@@ -172,6 +174,9 @@ impl CodexMessageProcessor {
 ClientRequest::GitDiffToRemote { request_id, params } => {
 self.git_diff_to_origin(request_id, params.cwd).await;
 }
+ClientRequest::LoginApiKey { request_id, params } => {
+self.login_api_key(request_id, params).await;
+}
 ClientRequest::LoginChatGpt { request_id } => {
 self.login_chatgpt(request_id).await;
 }
@@ -199,6 +204,39 @@ impl CodexMessageProcessor {
 }
 }
 
+async fn login_api_key(&mut self, request_id: RequestId, params: LoginApiKeyParams) {
+{
+let mut guard = self.active_login.lock().await;
+if let Some(active) = guard.take() {
+active.drop();
+}
+}
+
+match login_with_api_key(&self.config.codex_home, &params.api_key) {
+Ok(()) => {
+self.auth_manager.reload();
+self.outgoing
+.send_response(request_id, LoginApiKeyResponse {})
+.await;
+
+let payload = AuthStatusChangeNotification {
+auth_method: self.auth_manager.auth().map(|auth| auth.mode),
+};
+self.outgoing
+.send_server_notification(ServerNotification::AuthStatusChange(payload))
+.await;
+}
+Err(err) => {
+let error = JSONRPCErrorError {
+code: INTERNAL_ERROR_CODE,
+message: format!("failed to save api key: {err}"),
+data: None,
+};
+self.outgoing.send_error(request_id, error).await;
+}
+}
+}
+
 async fn login_chatgpt(&mut self, request_id: RequestId) {
 let config = self.config.as_ref();
 
@@ -352,7 +390,7 @@ impl CodexMessageProcessor {
 .await;
 
 // Send auth status change notification reflecting the current auth mode
-// after logout (which may fall back to API key via env var).
+// after logout.
 let current_auth_method = self.auth_manager.auth().map(|auth| auth.mode);
 let payload = AuthStatusChangeNotification {
 auth_method: current_auth_method,
@@ -367,7 +405,6 @@ impl CodexMessageProcessor {
 request_id: RequestId,
 params: codex_protocol::mcp_protocol::GetAuthStatusParams,
 ) {
-let preferred_auth_method: AuthMode = self.auth_manager.preferred_auth_method();
 let include_token = params.include_token.unwrap_or(false);
 let do_refresh = params.refresh_token.unwrap_or(false);
 
@@ -375,6 +412,11 @@ impl CodexMessageProcessor {
 tracing::warn!("failed to refresh token while getting auth status: {err}");
 }
 
+// Determine whether auth is required based on the active model provider.
+// If a custom provider is configured with `requires_openai_auth == false`,
+// then no auth step is required; otherwise, default to requiring auth.
+let requires_openai_auth = Some(self.config.model_provider.requires_openai_auth);
+
 let response = match self.auth_manager.auth() {
 Some(auth) => {
 let (reported_auth_method, token_opt) = match auth.get_token().await {
@@ -390,14 +432,14 @@ impl CodexMessageProcessor {
 };
 codex_protocol::mcp_protocol::GetAuthStatusResponse {
 auth_method: reported_auth_method,
-preferred_auth_method,
 auth_token: token_opt,
+requires_openai_auth,
 }
 }
 None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
 auth_method: None,
-preferred_auth_method,
 auth_token: None,
+requires_openai_auth,
 },
 };
 
@@ -56,8 +56,7 @@ impl MessageProcessor {
 config: Arc<Config>,
 ) -> Self {
 let outgoing = Arc::new(outgoing);
-let auth_manager =
-AuthManager::shared(config.codex_home.clone(), config.preferred_auth_method);
+let auth_manager = AuthManager::shared(config.codex_home.clone());
 let conversation_manager = Arc::new(ConversationManager::new(auth_manager.clone()));
 let codex_message_processor = CodexMessageProcessor::new(
 auth_manager,
@@ -18,6 +18,7 @@ use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
 use codex_protocol::mcp_protocol::GetAuthStatusParams;
 use codex_protocol::mcp_protocol::InterruptConversationParams;
 use codex_protocol::mcp_protocol::ListConversationsParams;
+use codex_protocol::mcp_protocol::LoginApiKeyParams;
 use codex_protocol::mcp_protocol::NewConversationParams;
 use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
 use codex_protocol::mcp_protocol::ResumeConversationParams;
@@ -318,6 +319,15 @@ impl McpProcess {
 self.send_request("resumeConversation", params).await
 }
 
+/// Send a `loginApiKey` JSON-RPC request.
+pub async fn send_login_api_key_request(
+&mut self,
+params: LoginApiKeyParams,
+) -> anyhow::Result<i64> {
+let params = Some(serde_json::to_value(params)?);
+self.send_request("loginApiKey", params).await
+}
+
 /// Send a `loginChatGpt` JSON-RPC request.
 pub async fn send_login_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
 self.send_request("loginChatGpt", None).await
@@ -1,9 +1,10 @@
 use std::path::Path;
 
-use codex_core::auth::login_with_api_key;
 use codex_protocol::mcp_protocol::AuthMode;
 use codex_protocol::mcp_protocol::GetAuthStatusParams;
 use codex_protocol::mcp_protocol::GetAuthStatusResponse;
+use codex_protocol::mcp_protocol::LoginApiKeyParams;
+use codex_protocol::mcp_protocol::LoginApiKeyResponse;
 use mcp_test_support::McpProcess;
 use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
@@ -36,10 +37,29 @@ stream_max_retries = 0
 )
 }
 
+async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
+let request_id = mcp
+.send_login_api_key_request(LoginApiKeyParams {
+api_key: api_key.to_string(),
+})
+.await
+.unwrap_or_else(|e| panic!("send loginApiKey: {e}"));
+
+let resp: JSONRPCResponse = timeout(
+DEFAULT_READ_TIMEOUT,
+mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
+)
+.await
+.unwrap_or_else(|e| panic!("loginApiKey timeout: {e}"))
+.unwrap_or_else(|e| panic!("loginApiKey response: {e}"));
+let _: LoginApiKeyResponse =
+to_response(resp).unwrap_or_else(|e| panic!("deserialize login response: {e}"));
+}
+
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn get_auth_status_no_auth() {
 let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-create_config_toml(codex_home.path()).expect("write config.toml");
+create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
 
 let mut mcp = McpProcess::new_with_env(codex_home.path(), &[("OPENAI_API_KEY", None)])
 .await
@@ -72,8 +92,7 @@ async fn get_auth_status_no_auth() {
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn get_auth_status_with_api_key() {
 let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-create_config_toml(codex_home.path()).expect("write config.toml");
-login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
+create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
 
 let mut mcp = McpProcess::new(codex_home.path())
 .await
@@ -83,6 +102,8 @@ async fn get_auth_status_with_api_key() {
 .expect("init timeout")
 .expect("init failed");
 
+login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+
 let request_id = mcp
 .send_get_auth_status_request(GetAuthStatusParams {
 include_token: Some(true),
@@ -101,14 +122,12 @@ async fn get_auth_status_with_api_key() {
 let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
 assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
 assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
-assert_eq!(status.preferred_auth_method, AuthMode::ChatGPT);
 }
 
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn get_auth_status_with_api_key_no_include_token() {
 let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
-create_config_toml(codex_home.path()).expect("write config.toml");
-login_with_api_key(codex_home.path(), "sk-test-key").expect("seed api key");
+create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
 
 let mut mcp = McpProcess::new(codex_home.path())
 .await
@@ -118,6 +137,8 @@ async fn get_auth_status_with_api_key_no_include_token() {
 .expect("init timeout")
 .expect("init failed");
 
+login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+
 // Build params via struct so None field is omitted in wire JSON.
 let params = GetAuthStatusParams {
 include_token: None,
@@ -138,5 +159,4 @@ async fn get_auth_status_with_api_key_no_include_token() {
 let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
 assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
 assert!(status.auth_token.is_none(), "token must be omitted");
-assert_eq!(status.preferred_auth_method, AuthMode::ChatGPT);
 }
@@ -1,7 +1,7 @@
|
|||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
|
||||||
use codex_core::auth::login_with_api_key;
|
use codex_login::login_with_api_key;
|
||||||
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
|
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
|
||||||
use codex_protocol::mcp_protocol::CancelLoginChatGptResponse;
|
use codex_protocol::mcp_protocol::CancelLoginChatGptResponse;
|
||||||
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
||||||
@@ -95,7 +95,7 @@ async fn logout_chatgpt_removes_auth() {
|
|||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||||
async fn login_and_cancel_chatgpt() {
|
async fn login_and_cancel_chatgpt() {
|
||||||
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
|
||||||
create_config_toml(codex_home.path()).expect("write config.toml");
|
create_config_toml(codex_home.path()).unwrap_or_else(|err| panic!("write config.toml: {err}"));
|
||||||
|
|
||||||
let mut mcp = McpProcess::new(codex_home.path())
|
let mut mcp = McpProcess::new(codex_home.path())
|
||||||
.await
|
.await
|
||||||
|
|||||||
@@ -31,6 +31,8 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
|
|||||||
codex_protocol::mcp_protocol::SendUserTurnResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::SendUserTurnResponse::export_all_to(out_dir)?;
|
||||||
codex_protocol::mcp_protocol::InterruptConversationResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::InterruptConversationResponse::export_all_to(out_dir)?;
|
||||||
codex_protocol::mcp_protocol::GitDiffToRemoteResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::GitDiffToRemoteResponse::export_all_to(out_dir)?;
|
||||||
|
codex_protocol::mcp_protocol::LoginApiKeyParams::export_all_to(out_dir)?;
|
||||||
|
codex_protocol::mcp_protocol::LoginApiKeyResponse::export_all_to(out_dir)?;
|
||||||
codex_protocol::mcp_protocol::LoginChatGptResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::LoginChatGptResponse::export_all_to(out_dir)?;
|
||||||
codex_protocol::mcp_protocol::CancelLoginChatGptResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::CancelLoginChatGptResponse::export_all_to(out_dir)?;
|
||||||
codex_protocol::mcp_protocol::LogoutChatGptResponse::export_all_to(out_dir)?;
|
codex_protocol::mcp_protocol::LogoutChatGptResponse::export_all_to(out_dir)?;
|
||||||
|
|||||||
@@ -126,6 +126,11 @@ pub enum ClientRequest {
|
|||||||
request_id: RequestId,
|
request_id: RequestId,
|
||||||
params: GitDiffToRemoteParams,
|
params: GitDiffToRemoteParams,
|
||||||
},
|
},
|
||||||
|
LoginApiKey {
|
||||||
|
#[serde(rename = "id")]
|
||||||
|
request_id: RequestId,
|
||||||
|
params: LoginApiKeyParams,
|
||||||
|
},
|
||||||
LoginChatGpt {
|
LoginChatGpt {
|
||||||
#[serde(rename = "id")]
|
#[serde(rename = "id")]
|
||||||
request_id: RequestId,
|
request_id: RequestId,
|
||||||
@@ -288,6 +293,16 @@ pub struct ArchiveConversationResponse {}
|
|||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct RemoveConversationSubscriptionResponse {}
|
pub struct RemoveConversationSubscriptionResponse {}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct LoginApiKeyParams {
|
||||||
|
pub api_key: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct LoginApiKeyResponse {}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct LoginChatGptResponse {
|
pub struct LoginChatGptResponse {
|
||||||
@@ -367,9 +382,14 @@ pub struct ExecArbitraryCommandResponse {
 pub struct GetAuthStatusResponse {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub auth_method: Option<AuthMode>,
-    pub preferred_auth_method: AuthMode,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub auth_token: Option<String>,
+
+    // Indicates that auth method must be valid to use the server.
+    // This can be false if using a custom provider that is configured
+    // with requires_openai_auth == false.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub requires_openai_auth: Option<bool>,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)]
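The `skip_serializing_if = "Option::is_none"` attribute means the new field is simply omitted from the response when it is `None`, so clients that predate it are unaffected. A toy model of that behavior (not the real `GetAuthStatusResponse`, and without its serde renames):

```rust
use serde::Serialize;

// Toy struct with the same serde attribute used in the diff.
#[derive(Serialize)]
struct StatusSketch {
    #[serde(skip_serializing_if = "Option::is_none")]
    requires_openai_auth: Option<bool>,
}

fn main() {
    let unset = StatusSketch { requires_openai_auth: None };
    let set = StatusSketch { requires_openai_auth: Some(false) };
    // Prints: {}  — the field disappears entirely when None.
    println!("{}", serde_json::to_string(&unset).expect("serialize"));
    // Prints: {"requires_openai_auth":false}
    println!("{}", serde_json::to_string(&set).expect("serialize"));
}
```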
@@ -308,7 +308,7 @@ async fn run_ratatui_app(
         ..
     } = cli;

-    let auth_manager = AuthManager::shared(config.codex_home.clone(), config.preferred_auth_method);
+    let auth_manager = AuthManager::shared(config.codex_home.clone());
     let login_status = get_login_status(&config);
     let should_show_onboarding =
         should_show_onboarding(login_status, &config, should_show_trust_screen);

@@ -392,7 +392,7 @@ fn get_login_status(config: &Config) -> LoginStatus {
     // Reading the OpenAI API key is an async operation because it may need
     // to refresh the token. Block on it.
     let codex_home = config.codex_home.clone();
-    match CodexAuth::from_codex_home(&codex_home, config.preferred_auth_method) {
+    match CodexAuth::from_codex_home(&codex_home) {
         Ok(Some(auth)) => LoginStatus::AuthMode(auth.mode),
         Ok(None) => LoginStatus::NotAuthenticated,
         Err(err) => {
@@ -460,60 +460,28 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
         return false;
     }

-    match login_status {
-        LoginStatus::NotAuthenticated => true,
-        LoginStatus::AuthMode(method) => method != config.preferred_auth_method,
-    }
+    login_status == LoginStatus::NotAuthenticated
 }

 #[cfg(test)]
 mod tests {
     use super::*;

-    fn make_config(preferred: AuthMode) -> Config {
-        let mut cfg = Config::load_from_base_config_with_overrides(
+    fn make_config() -> Config {
+        Config::load_from_base_config_with_overrides(
             ConfigToml::default(),
             ConfigOverrides::default(),
             std::env::temp_dir(),
         )
-        .expect("load default config");
-        cfg.preferred_auth_method = preferred;
-        cfg
+        .expect("load default config")
     }

     #[test]
     fn shows_login_when_not_authenticated() {
-        let cfg = make_config(AuthMode::ChatGPT);
+        let cfg = make_config();
         assert!(should_show_login_screen(
             LoginStatus::NotAuthenticated,
             &cfg
         ));
     }
-
-    #[test]
-    fn shows_login_when_api_key_but_prefers_chatgpt() {
-        let cfg = make_config(AuthMode::ChatGPT);
-        assert!(should_show_login_screen(
-            LoginStatus::AuthMode(AuthMode::ApiKey),
-            &cfg
-        ))
-    }
-
-    #[test]
-    fn hides_login_when_api_key_and_prefers_api_key() {
-        let cfg = make_config(AuthMode::ApiKey);
-        assert!(!should_show_login_screen(
-            LoginStatus::AuthMode(AuthMode::ApiKey),
-            &cfg
-        ))
-    }
-
-    #[test]
-    fn hides_login_when_chatgpt_and_prefers_chatgpt() {
-        let cfg = make_config(AuthMode::ChatGPT);
-        assert!(!should_show_login_screen(
-            LoginStatus::AuthMode(AuthMode::ChatGPT),
-            &cfg
-        ))
-    }
 }
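With `preferred_auth_method` gone, the login-screen decision reduces to a single equality check on the login status. A self-contained sketch of that rule, using local stand-ins rather than the real `LoginStatus`/`AuthMode`/`Config` types:

```rust
// Local stand-ins, only to show the simplified rule:
// the login screen is shown only when the user is not authenticated.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AuthMode {
    ChatGpt,
    ApiKey,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LoginStatus {
    NotAuthenticated,
    AuthMode(AuthMode),
}

fn should_show_login_screen(login_status: LoginStatus) -> bool {
    login_status == LoginStatus::NotAuthenticated
}

fn main() {
    assert!(should_show_login_screen(LoginStatus::NotAuthenticated));
    assert!(!should_show_login_screen(LoginStatus::AuthMode(AuthMode::ApiKey)));
    assert!(!should_show_login_screen(LoginStatus::AuthMode(AuthMode::ChatGpt)));
    println!("login screen is shown only when not authenticated");
}
```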
@@ -2,12 +2,17 @@

 use codex_core::AuthManager;
 use codex_core::auth::CLIENT_ID;
+use codex_core::auth::login_with_api_key;
+use codex_core::auth::read_openai_api_key_from_env;
 use codex_login::ServerOptions;
 use codex_login::ShutdownHandle;
 use codex_login::run_login_server;
 use crossterm::event::KeyCode;
 use crossterm::event::KeyEvent;
+use crossterm::event::KeyModifiers;
 use ratatui::buffer::Buffer;
+use ratatui::layout::Constraint;
+use ratatui::layout::Layout;
 use ratatui::layout::Rect;
 use ratatui::prelude::Widget;
 use ratatui::style::Color;

@@ -15,6 +20,9 @@ use ratatui::style::Modifier;
 use ratatui::style::Style;
 use ratatui::style::Stylize;
 use ratatui::text::Line;
+use ratatui::widgets::Block;
+use ratatui::widgets::BorderType;
+use ratatui::widgets::Borders;
 use ratatui::widgets::Paragraph;
 use ratatui::widgets::WidgetRef;
 use ratatui::widgets::Wrap;
@@ -38,8 +46,14 @@ pub(crate) enum SignInState {
     ChatGptContinueInBrowser(ContinueInBrowserState),
     ChatGptSuccessMessage,
     ChatGptSuccess,
-    EnvVarMissing,
-    EnvVarFound,
+    ApiKeyEntry(ApiKeyInputState),
+    ApiKeyConfigured,
 }

+#[derive(Clone, Default)]
+pub(crate) struct ApiKeyInputState {
+    value: String,
+    prepopulated_from_env: bool,
+}
+
 #[derive(Clone)]
@@ -59,6 +73,10 @@ impl Drop for ContinueInBrowserState {

 impl KeyboardHandler for AuthModeWidget {
     fn handle_key_event(&mut self, key_event: KeyEvent) {
+        if self.handle_api_key_entry_key_event(&key_event) {
+            return;
+        }
+
         match key_event.code {
             KeyCode::Up | KeyCode::Char('k') => {
                 self.highlighted_mode = AuthMode::ChatGPT;
@@ -69,7 +87,7 @@ impl KeyboardHandler for AuthModeWidget {
             KeyCode::Char('1') => {
                 self.start_chatgpt_login();
             }
-            KeyCode::Char('2') => self.verify_api_key(),
+            KeyCode::Char('2') => self.start_api_key_entry(),
             KeyCode::Enter => {
                 let sign_in_state = { (*self.sign_in_state.read().unwrap()).clone() };
                 match sign_in_state {
@@ -78,12 +96,9 @@ impl KeyboardHandler for AuthModeWidget {
                             self.start_chatgpt_login();
                         }
                         AuthMode::ApiKey => {
-                            self.verify_api_key();
+                            self.start_api_key_entry();
                         }
                     },
-                    SignInState::EnvVarMissing => {
-                        *self.sign_in_state.write().unwrap() = SignInState::PickMode;
-                    }
                     SignInState::ChatGptSuccessMessage => {
                         *self.sign_in_state.write().unwrap() = SignInState::ChatGptSuccess;
                     }
@@ -101,6 +116,10 @@ impl KeyboardHandler for AuthModeWidget {
             _ => {}
         }
     }
+
+    fn handle_paste(&mut self, pasted: String) {
+        let _ = self.handle_api_key_entry_paste(pasted);
+    }
 }

 #[derive(Clone)]
@@ -111,7 +130,6 @@ pub(crate) struct AuthModeWidget {
     pub sign_in_state: Arc<RwLock<SignInState>>,
     pub codex_home: PathBuf,
     pub login_status: LoginStatus,
-    pub preferred_auth_method: AuthMode,
     pub auth_manager: Arc<AuthManager>,
 }

@@ -129,24 +147,6 @@ impl AuthModeWidget {
             "".into(),
         ];

-        // If the user is already authenticated but the method differs from their
-        // preferred auth method, show a brief explanation.
-        if let LoginStatus::AuthMode(current) = self.login_status
-            && current != self.preferred_auth_method
-        {
-            let to_label = |mode: AuthMode| match mode {
-                AuthMode::ApiKey => "API key",
-                AuthMode::ChatGPT => "ChatGPT",
-            };
-            let msg = format!(
-                " You’re currently using {} while your preferred method is {}.",
-                to_label(current),
-                to_label(self.preferred_auth_method)
-            );
-            lines.push(msg.into());
-            lines.push("".into());
-        }
-
         let create_mode_item = |idx: usize,
                                 selected_mode: AuthMode,
                                 text: &str,
@@ -175,29 +175,17 @@ impl AuthModeWidget {

             vec![line1, line2]
         };
-        let chatgpt_label = if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ChatGPT))
-        {
-            "Continue using ChatGPT"
-        } else {
-            "Sign in with ChatGPT"
-        };

         lines.extend(create_mode_item(
             0,
             AuthMode::ChatGPT,
-            chatgpt_label,
+            "Sign in with ChatGPT",
             "Usage included with Plus, Pro, and Team plans",
         ));
-        let api_key_label = if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ApiKey))
-        {
-            "Continue using API key"
-        } else {
-            "Provide your own API key"
-        };
         lines.extend(create_mode_item(
             1,
             AuthMode::ApiKey,
-            api_key_label,
+            "Provide your own API key",
             "Pay for what you use",
         ));
         lines.push("".into());
@@ -282,26 +270,213 @@ impl AuthModeWidget {
             .render(area, buf);
     }

-    fn render_env_var_found(&self, area: Rect, buf: &mut Buffer) {
-        let lines = vec!["✓ Using OPENAI_API_KEY".fg(Color::Green).into()];
+    fn render_api_key_configured(&self, area: Rect, buf: &mut Buffer) {
+        let lines = vec![
+            "✓ API key configured".fg(Color::Green).into(),
+            "".into(),
+            " Codex will use usage-based billing with your API key.".into(),
+        ];
+
         Paragraph::new(lines)
             .wrap(Wrap { trim: false })
             .render(area, buf);
     }

-    fn render_env_var_missing(&self, area: Rect, buf: &mut Buffer) {
-        let lines = vec![
-            " To use Codex with the OpenAI API, set OPENAI_API_KEY in your environment"
-                .fg(Color::Cyan)
-                .into(),
-            "".into(),
-            " Press Enter to return".dim().into(),
-        ];
+    fn render_api_key_entry(&self, area: Rect, buf: &mut Buffer, state: &ApiKeyInputState) {
+        let [intro_area, input_area, footer_area] = Layout::vertical([
+            Constraint::Min(4),
+            Constraint::Length(3),
+            Constraint::Min(2),
+        ])
+        .areas(area);

-        Paragraph::new(lines)
+        let mut intro_lines: Vec<Line> = vec![
+            Line::from(vec![
+                "> ".into(),
+                "Use your own OpenAI API key for usage-based billing".bold(),
+            ]),
+            "".into(),
+            " Paste or type your API key below. It will be stored locally in auth.json.".into(),
+            "".into(),
+        ];
+        if state.prepopulated_from_env {
+            intro_lines.push(" Detected OPENAI_API_KEY environment variable.".into());
+            intro_lines.push(
+                " Paste a different key if you prefer to use another account."
+                    .dim()
+                    .into(),
+            );
+            intro_lines.push("".into());
+        }
+        Paragraph::new(intro_lines)
             .wrap(Wrap { trim: false })
-            .render(area, buf);
+            .render(intro_area, buf);
+
+        let content_line: Line = if state.value.is_empty() {
+            vec!["Paste or type your API key".dim()].into()
+        } else {
+            Line::from(state.value.clone())
+        };
+        Paragraph::new(content_line)
+            .wrap(Wrap { trim: false })
+            .block(
+                Block::default()
+                    .title("API key")
+                    .borders(Borders::ALL)
+                    .border_type(BorderType::Rounded)
+                    .border_style(Style::default().fg(Color::Cyan)),
+            )
+            .render(input_area, buf);
+
+        let mut footer_lines: Vec<Line> = vec![
+            " Press Enter to save".dim().into(),
+            " Press Esc to go back".dim().into(),
+        ];
+        if let Some(error) = &self.error {
+            footer_lines.push("".into());
+            footer_lines.push(error.as_str().red().into());
+        }
+        Paragraph::new(footer_lines)
+            .wrap(Wrap { trim: false })
+            .render(footer_area, buf);
+    }
+
+    fn handle_api_key_entry_key_event(&mut self, key_event: &KeyEvent) -> bool {
+        let mut should_save: Option<String> = None;
+        let mut should_request_frame = false;
+
+        {
+            let mut guard = self.sign_in_state.write().unwrap();
+            if let SignInState::ApiKeyEntry(state) = &mut *guard {
+                match key_event.code {
+                    KeyCode::Esc => {
+                        *guard = SignInState::PickMode;
+                        self.error = None;
+                        should_request_frame = true;
+                    }
+                    KeyCode::Enter => {
+                        let trimmed = state.value.trim().to_string();
+                        if trimmed.is_empty() {
+                            self.error = Some("API key cannot be empty".to_string());
+                            should_request_frame = true;
+                        } else {
+                            should_save = Some(trimmed);
+                        }
+                    }
+                    KeyCode::Backspace => {
+                        if state.prepopulated_from_env {
+                            state.value.clear();
+                            state.prepopulated_from_env = false;
+                        } else {
+                            state.value.pop();
+                        }
+                        self.error = None;
+                        should_request_frame = true;
+                    }
+                    KeyCode::Char(c)
+                        if !key_event.modifiers.contains(KeyModifiers::CONTROL)
+                            && !key_event.modifiers.contains(KeyModifiers::ALT) =>
+                    {
+                        if state.prepopulated_from_env {
+                            state.value.clear();
+                            state.prepopulated_from_env = false;
+                        }
+                        state.value.push(c);
+                        self.error = None;
+                        should_request_frame = true;
+                    }
+                    _ => {}
+                }
+                // handled; let guard drop before potential save
+            } else {
+                return false;
+            }
+        }
+
+        if let Some(api_key) = should_save {
+            self.save_api_key(api_key);
+        } else if should_request_frame {
+            self.request_frame.schedule_frame();
+        }
+        true
+    }
+
+    fn handle_api_key_entry_paste(&mut self, pasted: String) -> bool {
+        let trimmed = pasted.trim();
+        if trimmed.is_empty() {
+            return false;
+        }
+
+        let mut guard = self.sign_in_state.write().unwrap();
+        if let SignInState::ApiKeyEntry(state) = &mut *guard {
+            if state.prepopulated_from_env {
+                state.value = trimmed.to_string();
+                state.prepopulated_from_env = false;
+            } else {
+                state.value.push_str(trimmed);
+            }
+            self.error = None;
+        } else {
+            return false;
+        }
+
+        drop(guard);
+        self.request_frame.schedule_frame();
+        true
+    }
+
+    fn start_api_key_entry(&mut self) {
+        self.error = None;
+        let prefill_from_env = read_openai_api_key_from_env();
+        let mut guard = self.sign_in_state.write().unwrap();
+        match &mut *guard {
+            SignInState::ApiKeyEntry(state) => {
+                if state.value.is_empty() {
+                    if let Some(prefill) = prefill_from_env.clone() {
+                        state.value = prefill;
+                        state.prepopulated_from_env = true;
+                    } else {
+                        state.prepopulated_from_env = false;
+                    }
+                }
+            }
+            _ => {
+                *guard = SignInState::ApiKeyEntry(ApiKeyInputState {
+                    value: prefill_from_env.clone().unwrap_or_default(),
+                    prepopulated_from_env: prefill_from_env.is_some(),
+                });
+            }
+        }
+        drop(guard);
+        self.request_frame.schedule_frame();
+    }
+
+    fn save_api_key(&mut self, api_key: String) {
+        match login_with_api_key(&self.codex_home, &api_key) {
+            Ok(()) => {
+                self.error = None;
+                self.login_status = LoginStatus::AuthMode(AuthMode::ApiKey);
+                self.auth_manager.reload();
+                *self.sign_in_state.write().unwrap() = SignInState::ApiKeyConfigured;
+            }
+            Err(err) => {
+                self.error = Some(format!("Failed to save API key: {err}"));
+                let mut guard = self.sign_in_state.write().unwrap();
+                if let SignInState::ApiKeyEntry(existing) = &mut *guard {
+                    if existing.value.is_empty() {
+                        existing.value.push_str(&api_key);
+                    }
+                    existing.prepopulated_from_env = false;
+                } else {
+                    *guard = SignInState::ApiKeyEntry(ApiKeyInputState {
+                        value: api_key,
+                        prepopulated_from_env: false,
+                    });
+                }
+            }
+        }
+
+        self.request_frame.schedule_frame();
     }

     fn start_chatgpt_login(&mut self) {
@@ -354,18 +529,6 @@ impl AuthModeWidget {
             }
         }
     }
-
-    /// TODO: Read/write from the correct hierarchy config overrides + auth json + OPENAI_API_KEY.
-    fn verify_api_key(&mut self) {
-        if matches!(self.login_status, LoginStatus::AuthMode(AuthMode::ApiKey)) {
-            // We already have an API key configured (e.g., from auth.json or env),
-            // so mark this step complete immediately.
-            *self.sign_in_state.write().unwrap() = SignInState::EnvVarFound;
-        } else {
-            *self.sign_in_state.write().unwrap() = SignInState::EnvVarMissing;
-        }
-        self.request_frame.schedule_frame();
-    }
 }

 impl StepStateProvider for AuthModeWidget {
@@ -373,10 +536,10 @@ impl StepStateProvider for AuthModeWidget {
         let sign_in_state = self.sign_in_state.read().unwrap();
         match &*sign_in_state {
             SignInState::PickMode
-            | SignInState::EnvVarMissing
+            | SignInState::ApiKeyEntry(_)
             | SignInState::ChatGptContinueInBrowser(_)
             | SignInState::ChatGptSuccessMessage => StepState::InProgress,
-            SignInState::ChatGptSuccess | SignInState::EnvVarFound => StepState::Complete,
+            SignInState::ChatGptSuccess | SignInState::ApiKeyConfigured => StepState::Complete,
         }
     }
 }
@@ -397,11 +560,11 @@ impl WidgetRef for AuthModeWidget {
             SignInState::ChatGptSuccess => {
                 self.render_chatgpt_success(area, buf);
             }
-            SignInState::EnvVarMissing => {
-                self.render_env_var_missing(area, buf);
+            SignInState::ApiKeyEntry(state) => {
+                self.render_api_key_entry(area, buf, state);
             }
-            SignInState::EnvVarFound => {
-                self.render_env_var_found(area, buf);
+            SignInState::ApiKeyConfigured => {
+                self.render_api_key_configured(area, buf);
             }
         }
     }
@@ -34,6 +34,7 @@ enum Step {

 pub(crate) trait KeyboardHandler {
     fn handle_key_event(&mut self, key_event: KeyEvent);
+    fn handle_paste(&mut self, _pasted: String) {}
 }

 pub(crate) enum StepState {
@@ -69,7 +70,6 @@ impl OnboardingScreen {
             auth_manager,
             config,
         } = args;
-        let preferred_auth_method = config.preferred_auth_method;
         let cwd = config.cwd.clone();
         let codex_home = config.codex_home.clone();
         let mut steps: Vec<Step> = vec![Step::Welcome(WelcomeWidget {

@@ -84,7 +84,6 @@ impl OnboardingScreen {
                 codex_home: codex_home.clone(),
                 login_status,
                 auth_manager,
-                preferred_auth_method,
             }))
         }
         let is_git_repo = get_git_repo_root(&cwd).is_some();
@@ -194,6 +193,17 @@ impl KeyboardHandler for OnboardingScreen {
         };
         self.request_frame.schedule_frame();
     }
+
+    fn handle_paste(&mut self, pasted: String) {
+        if pasted.is_empty() {
+            return;
+        }
+
+        if let Some(active_step) = self.current_steps_mut().into_iter().last() {
+            active_step.handle_paste(pasted);
+        }
+        self.request_frame.schedule_frame();
+    }
 }

 impl WidgetRef for &OnboardingScreen {
@@ -263,6 +273,14 @@ impl KeyboardHandler for Step {
             Step::TrustDirectory(widget) => widget.handle_key_event(key_event),
         }
     }
+
+    fn handle_paste(&mut self, pasted: String) {
+        match self {
+            Step::Welcome(_) => {}
+            Step::Auth(widget) => widget.handle_paste(pasted),
+            Step::TrustDirectory(widget) => widget.handle_paste(pasted),
+        }
+    }
 }

 impl StepStateProvider for Step {
@@ -312,12 +330,14 @@ pub(crate) async fn run_onboarding_app(
             TuiEvent::Key(key_event) => {
                 onboarding_screen.handle_key_event(key_event);
             }
+            TuiEvent::Paste(text) => {
+                onboarding_screen.handle_paste(text);
+            }
             TuiEvent::Draw => {
                 let _ = tui.draw(u16::MAX, |frame| {
                     frame.render_widget_ref(&onboarding_screen, frame.area());
                 });
             }
-            _ => {}
         }
     }
 }
@@ -8,7 +8,7 @@ Run Codex head-less in pipelines. Example GitHub Action step:
 - name: Update changelog via Codex
   run: |
     npm install -g @openai/codex
-    export OPENAI_API_KEY="${{ secrets.OPENAI_KEY }}"
+    codex login --api-key "${{ secrets.OPENAI_KEY }}"
     codex exec --full-auto "update CHANGELOG for next release"
 ```

@@ -2,10 +2,10 @@

 ## Usage-based billing alternative: Use an OpenAI API key

-If you prefer to pay-as-you-go, you can still authenticate with your OpenAI API key by setting it as an environment variable:
+If you prefer to pay-as-you-go, you can still authenticate with your OpenAI API key:

 ```shell
-export OPENAI_API_KEY="your-api-key-here"
+codex login --api-key "your-api-key-here"
 ```

 This key must, at minimum, have write access to the Responses API.
@@ -18,36 +18,6 @@ If you've used the Codex CLI before with usage-based billing via an API key and
 2. Delete `~/.codex/auth.json` (on Windows: `C:\\Users\\USERNAME\\.codex\\auth.json`)
 3. Run `codex login` again

-## Forcing a specific auth method (advanced)
-
-You can explicitly choose which authentication Codex should prefer when both are available.
-
-- To always use your API key (even when ChatGPT auth exists), set:
-
-  ```toml
-  # ~/.codex/config.toml
-  preferred_auth_method = "apikey"
-  ```
-
-  Or override ad-hoc via CLI:
-
-  ```bash
-  codex --config preferred_auth_method="apikey"
-  ```
-
-- To prefer ChatGPT auth (default), set:
-
-  ```toml
-  # ~/.codex/config.toml
-  preferred_auth_method = "chatgpt"
-  ```
-
-Notes:
-
-- When `preferred_auth_method = "apikey"` and an API key is available, the login screen is skipped.
-- When `preferred_auth_method = "chatgpt"` (default), Codex prefers ChatGPT auth if present; if only an API key is present, it will use the API key. Certain account types may also require API-key mode.
-- To check which auth method is being used during a session, use the `/status` command in the TUI.
-
 ## Connecting on a "Headless" Machine

 Today, the login process entails running a server on `localhost:1455`. If you are on a "headless" server, such as a Docker container or are `ssh`'d into a remote machine, loading `localhost:1455` in the browser on your local machine will not automatically connect to the webserver running on the _headless_ machine, so you must use one of the following workarounds:
@@ -612,5 +612,4 @@ Options that are specific to the TUI.
 | `experimental_use_exec_command_tool` | boolean | Use experimental exec command tool. |
 | `responses_originator_header_internal_override` | string | Override `originator` header value. |
 | `projects.<path>.trust_level` | string | Mark project/worktree as trusted (only `"trusted"` is recognized). |
-| `preferred_auth_method` | `chatgpt` \| `apikey` | Select default auth method (default: `chatgpt`). |
 | `tools.web_search` | boolean | Enable web search tool (alias: `web_search_request`) (default: false). |