Add forced_chatgpt_workspace_id and forced_login_method configuration options (#5303)

This PR adds support for configs to specify a forced login method
(chatgpt or api) as well as a forced chatgpt account id. This lets
enterprises use [managed
configs](https://developers.openai.com/codex/security#managed-configuration)
to force all employees to use their company's workspace instead of their
own or any other.

When a workspace id is set, a query param is sent to the login flow
which auto-selects the given workspace or errors if the user isn't a
member of it.

This PR is large but a large % of it is tests, wiring, and required
formatting changes.

API login with chatgpt forced
<img width="1592" height="116" alt="CleanShot 2025-10-19 at 22 40 04"
src="https://github.com/user-attachments/assets/560c6bb4-a20a-4a37-95af-93df39d057dd"
/>

ChatGPT login with api forced
<img width="1018" height="100" alt="CleanShot 2025-10-19 at 22 40 29"
src="https://github.com/user-attachments/assets/d010bbbb-9c8d-4227-9eda-e55bf043b4af"
/>

Onboarding with api forced
<img width="892" height="460" alt="CleanShot 2025-10-19 at 22 41 02"
src="https://github.com/user-attachments/assets/cc0ed45c-b257-4d62-a32e-6ca7514b5edd"
/>

Onboarding with ChatGPT forced
<img width="1154" height="426" alt="CleanShot 2025-10-19 at 22 41 27"
src="https://github.com/user-attachments/assets/41c41417-dc68-4bb4-b3e7-3b7769f7e6a1"
/>

Logging in with the wrong workspace
<img width="2222" height="84" alt="CleanShot 2025-10-19 at 22 42 31"
src="https://github.com/user-attachments/assets/0ff4222c-f626-4dd3-b035-0b7fe998a046"
/>
This commit is contained in:
Gabriel Peal
2025-10-20 08:50:54 -07:00
committed by GitHub
parent d01f91ecec
commit d87f87e25b
19 changed files with 920 additions and 66 deletions

View File

@@ -5,6 +5,7 @@ use crate::JSONRPCNotification;
use crate::JSONRPCRequest;
use crate::RequestId;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -473,6 +474,11 @@ pub struct UserSavedConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub sandbox_settings: Option<SandboxSettings>,
#[serde(skip_serializing_if = "Option::is_none")]
pub forced_chatgpt_workspace_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub forced_login_method: Option<ForcedLoginMethod>,
/// Model-specific configuration
#[serde(skip_serializing_if = "Option::is_none")]
pub model: Option<String>,

View File

@@ -84,6 +84,7 @@ use codex_login::ServerOptions as LoginServerOptions;
use codex_login::ShutdownHandle;
use codex_login::run_login_server;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InputMessageKind;
@@ -243,6 +244,19 @@ impl CodexMessageProcessor {
}
async fn login_api_key(&mut self, request_id: RequestId, params: LoginApiKeyParams) {
if matches!(
self.config.forced_login_method,
Some(ForcedLoginMethod::Chatgpt)
) {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: "API key login is disabled. Use ChatGPT login instead.".to_string(),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
{
let mut guard = self.active_login.lock().await;
if let Some(active) = guard.take() {
@@ -278,9 +292,23 @@ impl CodexMessageProcessor {
async fn login_chatgpt(&mut self, request_id: RequestId) {
let config = self.config.as_ref();
if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: "ChatGPT login is disabled. Use API key login instead.".to_string(),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
let opts = LoginServerOptions {
open_browser: false,
..LoginServerOptions::new(config.codex_home.clone(), CLIENT_ID.to_string())
..LoginServerOptions::new(
config.codex_home.clone(),
CLIENT_ID.to_string(),
config.forced_chatgpt_workspace_id.clone(),
)
};
enum LoginChatGptReply {

View File

@@ -5,6 +5,7 @@ use app_test_support::to_response;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::GetAuthStatusResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
@@ -57,6 +58,19 @@ sandbox_mode = "danger-full-access"
)
}
/// Writes a minimal `config.toml` into `codex_home` that pins
/// `forced_login_method` to the supplied value ("chatgpt" or "api"),
/// so tests can exercise the forced-login rejection paths.
fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
    let body = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_login_method = "{forced_method}"
"#
    );
    let path = codex_home.join("config.toml");
    std::fs::write(path, body)
}
async fn login_with_api_key_via_request(mcp: &mut McpProcess, api_key: &str) {
let request_id = mcp
.send_login_api_key_request(LoginApiKeyParams {
@@ -221,3 +235,38 @@ async fn get_auth_status_with_api_key_no_include_token() {
assert_eq!(status.auth_method, Some(AuthMode::ApiKey));
assert!(status.auth_token.is_none(), "token must be omitted");
}
/// When config.toml sets `forced_login_method = "chatgpt"`, a
/// `loginApiKey` request must be rejected with a JSON-RPC error rather
/// than persisting the API key.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_api_key_rejected_when_forced_chatgpt() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
// Force ChatGPT login via the managed config before the server starts.
create_config_toml_forced_login(codex_home.path(), "chatgpt")
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let request_id = mcp
.send_login_api_key_request(LoginApiKeyParams {
api_key: "sk-test-key".to_string(),
})
.await
.expect("send loginApiKey");
// Expect an error message, not a success response, for this request id.
let err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await
.expect("loginApiKey error timeout")
.expect("loginApiKey error");
assert_eq!(
err.error.message,
"API key login is disabled. Use ChatGPT login instead."
);
}

View File

@@ -11,6 +11,7 @@ use codex_app_server_protocol::SandboxSettings;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_core::protocol::AskForApproval;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -33,6 +34,8 @@ model_reasoning_summary = "detailed"
model_reasoning_effort = "high"
model_verbosity = "medium"
profile = "test"
forced_chatgpt_workspace_id = "12345678-0000-0000-0000-000000000000"
forced_login_method = "chatgpt"
[sandbox_workspace_write]
writable_roots = ["/tmp"]
@@ -92,6 +95,8 @@ async fn get_config_toml_parses_all_fields() {
exclude_tmpdir_env_var: Some(true),
exclude_slash_tmp: Some(true),
}),
forced_chatgpt_workspace_id: Some("12345678-0000-0000-0000-000000000000".into()),
forced_login_method: Some(ForcedLoginMethod::Chatgpt),
model: Some("gpt-5-codex".into()),
model_reasoning_effort: Some(ReasoningEffort::High),
model_reasoning_summary: Some(ReasoningSummary::Detailed),
@@ -149,6 +154,8 @@ async fn get_config_toml_empty() {
approval_policy: None,
sandbox_mode: None,
sandbox_settings: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
model: None,
model_reasoning_effort: None,
model_reasoning_summary: None,

View File

@@ -7,6 +7,7 @@ use codex_app_server_protocol::CancelLoginChatGptParams;
use codex_app_server_protocol::CancelLoginChatGptResponse;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::GetAuthStatusResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::LogoutChatGptResponse;
@@ -144,3 +145,97 @@ async fn login_and_cancel_chatgpt() {
eprintln!("warning: did not observe login_chat_gpt_complete notification after cancel");
}
}
/// Test helper: writes a minimal `config.toml` into `codex_home` with
/// `forced_login_method` set to the given value ("chatgpt" or "api").
fn create_config_toml_forced_login(codex_home: &Path, forced_method: &str) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
let contents = format!(
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_login_method = "{forced_method}"
"#
);
std::fs::write(config_toml, contents)
}
/// Writes a minimal `config.toml` into `codex_home` that pins
/// `forced_chatgpt_workspace_id` to the supplied workspace id, so tests
/// can verify the forced-workspace login flow.
fn create_config_toml_forced_workspace(
    codex_home: &Path,
    workspace_id: &str,
) -> std::io::Result<()> {
    let body = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
forced_chatgpt_workspace_id = "{workspace_id}"
"#
    );
    let path = codex_home.join("config.toml");
    std::fs::write(path, body)
}
/// When config.toml sets `forced_login_method = "api"`, a
/// `loginChatGpt` request must be rejected with a JSON-RPC error.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_chatgpt_rejected_when_forced_api() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
// Force API-key login via the managed config before the server starts.
create_config_toml_forced_login(codex_home.path(), "api")
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let request_id = mcp
.send_login_chat_gpt_request()
.await
.expect("send loginChatGpt");
// Expect an error message, not a success response, for this request id.
let err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await
.expect("loginChatGpt error timeout")
.expect("loginChatGpt error");
assert_eq!(
err.error.message,
"ChatGPT login is disabled. Use API key login instead."
);
}
/// When `forced_chatgpt_workspace_id` is configured, the auth URL
/// returned by `loginChatGpt` must carry the `allowed_workspace_id`
/// query parameter so the login flow auto-selects that workspace.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn login_chatgpt_includes_forced_workspace_query_param() {
let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
create_config_toml_forced_workspace(codex_home.path(), "ws-forced")
.unwrap_or_else(|err| panic!("write config.toml: {err}"));
let mut mcp = McpProcess::new(codex_home.path())
.await
.expect("spawn mcp process");
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
.await
.expect("init timeout")
.expect("init failed");
let request_id = mcp
.send_login_chat_gpt_request()
.await
.expect("send loginChatGpt");
// The login itself succeeds; we only inspect the generated auth URL.
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await
.expect("loginChatGpt timeout")
.expect("loginChatGpt response");
let login: LoginChatGptResponse = to_response(resp).expect("deserialize login resp");
assert!(
login.auth_url.contains("allowed_workspace_id=ws-forced"),
"auth URL should include forced workspace"
);
}

View File

@@ -9,12 +9,20 @@ use codex_core::config::ConfigOverrides;
use codex_login::ServerOptions;
use codex_login::run_device_code_login;
use codex_login::run_login_server;
use codex_protocol::config_types::ForcedLoginMethod;
use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;
pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
let opts = ServerOptions::new(codex_home, CLIENT_ID.to_string());
pub async fn login_with_chatgpt(
codex_home: PathBuf,
forced_chatgpt_workspace_id: Option<String>,
) -> std::io::Result<()> {
let opts = ServerOptions::new(
codex_home,
CLIENT_ID.to_string(),
forced_chatgpt_workspace_id,
);
let server = run_login_server(opts)?;
eprintln!(
@@ -28,7 +36,14 @@ pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> {
pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) -> ! {
let config = load_config_or_exit(cli_config_overrides).await;
match login_with_chatgpt(config.codex_home).await {
if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
eprintln!("ChatGPT login is disabled. Use API key login instead.");
std::process::exit(1);
}
let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
match login_with_chatgpt(config.codex_home, forced_chatgpt_workspace_id).await {
Ok(_) => {
eprintln!("Successfully logged in");
std::process::exit(0);
@@ -46,6 +61,11 @@ pub async fn run_login_with_api_key(
) -> ! {
let config = load_config_or_exit(cli_config_overrides).await;
if matches!(config.forced_login_method, Some(ForcedLoginMethod::Chatgpt)) {
eprintln!("API key login is disabled. Use ChatGPT login instead.");
std::process::exit(1);
}
match login_with_api_key(&config.codex_home, &api_key) {
Ok(_) => {
eprintln!("Successfully logged in");
@@ -92,9 +112,15 @@ pub async fn run_login_with_device_code(
client_id: Option<String>,
) -> ! {
let config = load_config_or_exit(cli_config_overrides).await;
if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
eprintln!("ChatGPT login is disabled. Use API key login instead.");
std::process::exit(1);
}
let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
let mut opts = ServerOptions::new(
config.codex_home,
client_id.unwrap_or(CLIENT_ID.to_string()),
forced_chatgpt_workspace_id,
);
if let Some(iss) = issuer_base_url {
opts.issuer = iss;

View File

@@ -2,6 +2,8 @@ use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
#[cfg(test)]
use serial_test::serial;
use std::env;
use std::fs::File;
use std::fs::OpenOptions;
@@ -16,7 +18,9 @@ use std::sync::Mutex;
use std::time::Duration;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;
use crate::config::Config;
use crate::token_data::PlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_id_token;
@@ -233,6 +237,74 @@ pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<(
write_auth_json(&get_auth_file(codex_home), &auth_dot_json)
}
/// Validates any persisted credentials against the managed login
/// restrictions in `config`, logging the user out (removing auth.json)
/// when they conflict.
///
/// Checks, in order:
/// 1. `forced_login_method` — the stored auth mode must match it.
/// 2. `forced_chatgpt_workspace_id` — for ChatGPT credentials only, the
///    token's `chatgpt_account_id` must equal the configured workspace id.
///
/// Returns `Ok(())` when no credentials are stored or all restrictions
/// pass; otherwise returns an `Err` describing the violation after the
/// logout attempt.
pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
// No stored credentials — nothing to enforce.
let Some(auth) = load_auth(&config.codex_home, true)? else {
return Ok(());
};
if let Some(required_method) = config.forced_login_method {
// Exhaustive over (required, actual): matching pairs are fine,
// mismatched pairs produce the user-facing violation message.
let method_violation = match (required_method, auth.mode) {
(ForcedLoginMethod::Api, AuthMode::ApiKey) => None,
(ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT) => None,
(ForcedLoginMethod::Api, AuthMode::ChatGPT) => Some(
"API key login is required, but ChatGPT is currently being used. Logging out."
.to_string(),
),
(ForcedLoginMethod::Chatgpt, AuthMode::ApiKey) => Some(
"ChatGPT login is required, but an API key is currently being used. Logging out."
.to_string(),
),
};
if let Some(message) = method_violation {
return logout_with_message(&config.codex_home, message);
}
}
if let Some(expected_account_id) = config.forced_chatgpt_workspace_id.as_deref() {
// The workspace restriction only constrains ChatGPT credentials;
// an API key (when no method is forced) is deliberately allowed.
if auth.mode != AuthMode::ChatGPT {
return Ok(());
}
let token_data = match auth.get_token_data().await {
Ok(data) => data,
Err(err) => {
// Unreadable credentials cannot be verified — fail closed.
return logout_with_message(
&config.codex_home,
format!(
"Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
),
);
}
};
// "Workspace" is the externally visible name for the ChatGPT account id.
let chatgpt_account_id = token_data.id_token.chatgpt_account_id.as_deref();
if chatgpt_account_id != Some(expected_account_id) {
// Distinguish "wrong workspace" from "no workspace claim at all".
let message = match chatgpt_account_id {
Some(actual) => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials belong to {actual}. Logging out."
),
None => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials lack a workspace identifier. Logging out."
),
};
return logout_with_message(&config.codex_home, message);
}
}
Ok(())
}
/// Best-effort removal of auth.json followed by an unconditional error
/// carrying `message`; when the removal itself fails, that failure is
/// appended to the message so neither problem is lost.
fn logout_with_message(codex_home: &Path, message: String) -> std::io::Result<()> {
    let text = if let Err(err) = logout(codex_home) {
        format!("{message}. Failed to remove auth.json: {err}")
    } else {
        message
    };
    Err(std::io::Error::other(text))
}
fn load_auth(
codex_home: &Path,
enable_codex_api_key_env: bool,
@@ -249,9 +321,8 @@ fn load_auth(
let client = crate::default_client::create_client();
let auth_dot_json = match try_read_auth_json(&auth_file) {
Ok(auth) => auth,
Err(e) => {
return Err(e);
}
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(err),
};
let AuthDotJson {
@@ -403,17 +474,19 @@ struct CachedAuth {
#[cfg(test)]
mod tests {
use super::*;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use base64::Engine;
use codex_protocol::config_types::ForcedLoginMethod;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
use tempfile::tempdir;
const LAST_REFRESH: &str = "2025-08-06T20:41:36.232376Z";
#[tokio::test]
async fn roundtrip_auth_dot_json() {
let codex_home = tempdir().unwrap();
@@ -421,6 +494,7 @@ mod tests {
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: None,
},
codex_home.path(),
)
@@ -460,6 +534,13 @@ mod tests {
assert!(auth.tokens.is_none(), "tokens should be cleared");
}
/// An absent auth.json is not an error: `from_codex_home` must return
/// `Ok(None)` rather than surfacing the NotFound I/O error.
#[test]
fn missing_auth_json_returns_none() {
let dir = tempdir().unwrap();
let auth = CodexAuth::from_codex_home(dir.path()).expect("call should succeed");
assert_eq!(auth, None);
}
#[tokio::test]
async fn pro_account_with_no_api_key_uses_chatgpt_auth() {
let codex_home = tempdir().unwrap();
@@ -467,6 +548,7 @@ mod tests {
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: None,
},
codex_home.path(),
)
@@ -484,6 +566,10 @@ mod tests {
let guard = auth_dot_json.lock().unwrap();
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
let last_refresh = auth_dot_json
.last_refresh
.expect("last_refresh should be recorded");
assert_eq!(
&AuthDotJson {
openai_api_key: None,
@@ -491,20 +577,17 @@ mod tests {
id_token: IdTokenInfo {
email: Some("user@example.com".to_string()),
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
chatgpt_account_id: None,
raw_jwt: fake_jwt,
},
access_token: "test-access-token".to_string(),
refresh_token: "test-refresh-token".to_string(),
account_id: None,
}),
last_refresh: Some(
DateTime::parse_from_rfc3339(LAST_REFRESH)
.unwrap()
.with_timezone(&Utc)
),
last_refresh: Some(last_refresh),
},
auth_dot_json
)
);
}
#[tokio::test]
@@ -543,6 +626,7 @@ mod tests {
/// Inputs for `write_auth_file`, controlling the contents of the fake
/// auth.json (and the claims baked into its fabricated JWT).
struct AuthFileParams {
// Optional API key to include in the auth file (presumably written as
// the stored OpenAI API key — confirm against write_auth_file).
openai_api_key: Option<String>,
// Value for the `chatgpt_plan_type` claim (e.g. "pro").
chatgpt_plan_type: String,
// Optional `chatgpt_account_id` claim; omitted from the JWT when None.
chatgpt_account_id: Option<String>,
}
fn write_auth_file(params: AuthFileParams, codex_home: &Path) -> std::io::Result<String> {
@@ -557,15 +641,21 @@ mod tests {
alg: "none",
typ: "JWT",
};
let payload = serde_json::json!({
"email": "user@example.com",
"email_verified": true,
"https://api.openai.com/auth": {
"chatgpt_account_id": "bc3618e3-489d-4d49-9362-1561dc53ba53",
let mut auth_payload = serde_json::json!({
"chatgpt_plan_type": params.chatgpt_plan_type,
"chatgpt_user_id": "user-12345",
"user_id": "user-12345",
});
if let Some(chatgpt_account_id) = params.chatgpt_account_id {
let org_value = serde_json::Value::String(chatgpt_account_id);
auth_payload["chatgpt_account_id"] = org_value;
}
let payload = serde_json::json!({
"email": "user@example.com",
"email_verified": true,
"https://api.openai.com/auth": auth_payload,
});
let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
let header_b64 = b64(&serde_json::to_vec(&header)?);
@@ -580,12 +670,159 @@ mod tests {
"access_token": "test-access-token",
"refresh_token": "test-refresh-token"
},
"last_refresh": LAST_REFRESH,
"last_refresh": Utc::now(),
});
let auth_json = serde_json::to_string_pretty(&auth_json_data)?;
std::fs::write(auth_file, auth_json)?;
Ok(fake_jwt)
}
/// Builds a default `Config` rooted at `codex_home` and then applies the
/// given forced-login-method / forced-workspace overrides for testing
/// `enforce_login_restrictions`.
fn build_config(
    codex_home: &Path,
    forced_login_method: Option<ForcedLoginMethod>,
    forced_chatgpt_workspace_id: Option<String>,
) -> Config {
    let base = ConfigToml::default();
    let overrides = ConfigOverrides::default();
    let mut cfg =
        Config::load_from_base_config_with_overrides(base, overrides, codex_home.to_path_buf())
            .expect("config should load");
    cfg.forced_login_method = forced_login_method;
    cfg.forced_chatgpt_workspace_id = forced_chatgpt_workspace_id;
    cfg
}
/// RAII guard that sets a process environment variable for the lifetime
/// of a test and restores the previous value (or removes it) on drop.
/// Use sparingly.
/// TODO (gpeal): replace this with an injectable env var provider.
#[cfg(test)]
struct EnvVarGuard {
// Variable name; 'static so the guard never outlives the key.
key: &'static str,
// Value before `set` ran; None means the variable was unset.
original: Option<std::ffi::OsString>,
}
#[cfg(test)]
impl EnvVarGuard {
/// Snapshots the current value of `key`, then overwrites it with `value`.
fn set(key: &'static str, value: &str) -> Self {
let original = env::var_os(key);
// set_var is unsafe because mutating the environment is not
// thread-safe; tests using this guard should be serialized
// (NOTE(review): see the #[serial] attribute on callers).
unsafe {
env::set_var(key, value);
}
Self { key, original }
}
}
#[cfg(test)]
impl Drop for EnvVarGuard {
// Restore the prior value (or unset) so later tests see a clean env.
fn drop(&mut self) {
unsafe {
match &self.original {
Some(value) => env::set_var(self.key, value),
None => env::remove_var(self.key),
}
}
}
}
/// Forcing ChatGPT login while an API key is stored must return an
/// error and delete auth.json.
#[tokio::test]
async fn enforce_login_restrictions_logs_out_for_method_mismatch() {
let codex_home = tempdir().unwrap();
// Seed an API-key login that conflicts with the forced method.
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("expected method mismatch to error");
assert!(err.to_string().contains("ChatGPT login is required"));
assert!(
!codex_home.path().join("auth.json").exists(),
"auth.json should be removed on mismatch"
);
}
/// ChatGPT credentials whose `chatgpt_account_id` differs from the
/// forced workspace must be logged out with an explanatory error.
#[tokio::test]
async fn enforce_login_restrictions_logs_out_for_workspace_mismatch() {
let codex_home = tempdir().unwrap();
// Seed ChatGPT credentials belonging to a different workspace.
let _jwt = write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: Some("org_another_org".to_string()),
},
codex_home.path(),
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("expected workspace mismatch to error");
assert!(err.to_string().contains("workspace org_mine"));
assert!(
!codex_home.path().join("auth.json").exists(),
"auth.json should be removed on mismatch"
);
}
/// ChatGPT credentials whose `chatgpt_account_id` matches the forced
/// workspace must pass, leaving auth.json untouched.
#[tokio::test]
async fn enforce_login_restrictions_allows_matching_workspace() {
let codex_home = tempdir().unwrap();
let _jwt = write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: Some("org_mine".to_string()),
},
codex_home.path(),
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
super::enforce_login_restrictions(&config)
.await
.expect("matching workspace should succeed");
assert!(
codex_home.path().join("auth.json").exists(),
"auth.json should remain when restrictions pass"
);
}
/// A forced workspace id alone (with no forced login method) must not
/// reject API-key credentials — the restriction only binds ChatGPT auth.
#[tokio::test]
async fn enforce_login_restrictions_allows_api_key_if_login_method_not_set_but_forced_chatgpt_workspace_id_is_set()
{
let codex_home = tempdir().unwrap();
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
super::enforce_login_restrictions(&config)
.await
.expect("matching workspace should succeed");
assert!(
codex_home.path().join("auth.json").exists(),
"auth.json should remain when restrictions pass"
);
}
/// An API key supplied only via the environment variable must still be
/// rejected when ChatGPT login is forced. Serialized because it mutates
/// process-wide env state via EnvVarGuard.
#[tokio::test]
#[serial(codex_api_key)]
async fn enforce_login_restrictions_blocks_env_api_key_when_chatgpt_required() {
let _guard = EnvVarGuard::set(CODEX_API_KEY_ENV_VAR, "sk-env");
let codex_home = tempdir().unwrap();
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("environment API key should not satisfy forced ChatGPT login");
assert!(
err.to_string()
.contains("ChatGPT login is required, but an API key is currently being used.")
);
}
}
/// Central manager providing a single source of truth for auth.json derived

View File

@@ -36,6 +36,7 @@ use crate::protocol::SandboxPolicy;
use anyhow::Context;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -209,6 +210,12 @@ pub struct Config {
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: String,
/// When set, restricts ChatGPT login to a specific workspace identifier.
pub forced_chatgpt_workspace_id: Option<String>,
/// When set, restricts the login mechanism users may use.
pub forced_login_method: Option<ForcedLoginMethod>,
/// Include an experimental plan tool that the model can use to update its current plan and status of each step.
pub include_plan_tool: bool,
@@ -844,6 +851,14 @@ pub struct ConfigToml {
/// System instructions.
pub instructions: Option<String>,
/// When set, restricts ChatGPT login to a specific workspace identifier.
#[serde(default)]
pub forced_chatgpt_workspace_id: Option<String>,
/// When set, restricts the login mechanism users may use.
#[serde(default)]
pub forced_login_method: Option<ForcedLoginMethod>,
/// Definition for MCP servers that Codex can reach out to for tool calls.
#[serde(default)]
pub mcp_servers: HashMap<String, McpServerConfig>,
@@ -950,6 +965,8 @@ impl From<ConfigToml> for UserSavedConfig {
approval_policy: config_toml.approval_policy,
sandbox_mode: config_toml.sandbox_mode,
sandbox_settings: config_toml.sandbox_workspace_write.map(From::from),
forced_chatgpt_workspace_id: config_toml.forced_chatgpt_workspace_id,
forced_login_method: config_toml.forced_login_method,
model: config_toml.model,
model_reasoning_effort: config_toml.model_reasoning_effort,
model_reasoning_summary: config_toml.model_reasoning_summary,
@@ -1250,6 +1267,18 @@ impl Config {
let use_experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
let use_experimental_use_rmcp_client = features.enabled(Feature::RmcpClient);
let forced_chatgpt_workspace_id =
cfg.forced_chatgpt_workspace_id.as_ref().and_then(|value| {
let trimmed = value.trim();
if trimmed.is_empty() {
None
} else {
Some(trimmed.to_string())
}
});
let forced_login_method = cfg.forced_login_method;
let model = model
.or(config_profile.model)
.or(cfg.model)
@@ -1358,6 +1387,8 @@ impl Config {
.chatgpt_base_url
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
forced_chatgpt_workspace_id,
forced_login_method,
include_plan_tool: include_plan_tool_flag,
include_apply_patch_tool: include_apply_patch_tool_flag,
tools_web_search_request,
@@ -2800,6 +2831,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -2867,6 +2900,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -2949,6 +2984,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -3017,6 +3054,8 @@ model_verbosity = "high"
model_verbosity: Some(Verbosity::High),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,

View File

@@ -28,6 +28,8 @@ pub struct IdTokenInfo {
/// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
/// (Note: values may vary by backend.)
pub(crate) chatgpt_plan_type: Option<PlanType>,
/// Organization/workspace identifier associated with the token, if present.
pub chatgpt_account_id: Option<String>,
pub raw_jwt: String,
}
@@ -71,6 +73,8 @@ struct IdClaims {
struct AuthClaims {
#[serde(default)]
chatgpt_plan_type: Option<PlanType>,
#[serde(default)]
chatgpt_account_id: Option<String>,
}
#[derive(Debug, Error)]
@@ -94,11 +98,20 @@ pub fn parse_id_token(id_token: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64)?;
let claims: IdClaims = serde_json::from_slice(&payload_bytes)?;
Ok(IdTokenInfo {
match claims.auth {
Some(auth) => Ok(IdTokenInfo {
email: claims.email,
chatgpt_plan_type: claims.auth.and_then(|a| a.chatgpt_plan_type),
raw_jwt: id_token.to_string(),
})
chatgpt_plan_type: auth.chatgpt_plan_type,
chatgpt_account_id: auth.chatgpt_account_id,
}),
None => Ok(IdTokenInfo {
email: claims.email,
raw_jwt: id_token.to_string(),
chatgpt_plan_type: None,
chatgpt_account_id: None,
}),
}
}
fn deserialize_id_token<'de, D>(deserializer: D) -> Result<IdTokenInfo, D::Error>

View File

@@ -15,6 +15,7 @@ use codex_core::AuthManager;
use codex_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
use codex_core::ConversationManager;
use codex_core::NewConversation;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::features::Feature;
@@ -195,6 +196,11 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides).await?;
let approve_all_enabled = config.features.enabled(Feature::ApproveAll);
if let Err(err) = enforce_login_restrictions(&config).await {
eprintln!("{err}");
std::process::exit(1);
}
let otel = codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION"));
#[allow(clippy::print_stderr)]

View File

@@ -186,6 +186,13 @@ pub async fn run_device_code_login(opts: ServerOptions) -> std::io::Result<()> {
.await
.map_err(|err| std::io::Error::other(format!("device code exchange failed: {err}")))?;
if let Err(message) = crate::server::ensure_workspace_allowed(
opts.forced_chatgpt_workspace_id.as_deref(),
&tokens.id_token,
) {
return Err(io::Error::new(io::ErrorKind::PermissionDenied, message));
}
crate::server::persist_tokens_async(
&opts.codex_home,
None,

View File

@@ -38,10 +38,15 @@ pub struct ServerOptions {
pub port: u16,
pub open_browser: bool,
pub force_state: Option<String>,
pub forced_chatgpt_workspace_id: Option<String>,
}
impl ServerOptions {
pub fn new(codex_home: PathBuf, client_id: String) -> Self {
pub fn new(
codex_home: PathBuf,
client_id: String,
forced_chatgpt_workspace_id: Option<String>,
) -> Self {
Self {
codex_home,
client_id,
@@ -49,6 +54,7 @@ impl ServerOptions {
port: DEFAULT_PORT,
open_browser: true,
force_state: None,
forced_chatgpt_workspace_id,
}
}
}
@@ -104,7 +110,14 @@ pub fn run_login_server(opts: ServerOptions) -> io::Result<LoginServer> {
let server = Arc::new(server);
let redirect_uri = format!("http://localhost:{actual_port}/auth/callback");
let auth_url = build_authorize_url(&opts.issuer, &opts.client_id, &redirect_uri, &pkce, &state);
let auth_url = build_authorize_url(
&opts.issuer,
&opts.client_id,
&redirect_uri,
&pkce,
&state,
opts.forced_chatgpt_workspace_id.as_deref(),
);
if opts.open_browser {
let _ = webbrowser::open(&auth_url);
@@ -240,6 +253,13 @@ async fn process_request(
.await
{
Ok(tokens) => {
if let Err(message) = ensure_workspace_allowed(
opts.forced_chatgpt_workspace_id.as_deref(),
&tokens.id_token,
) {
eprintln!("Workspace restriction error: {message}");
return login_error_response(&message);
}
// Obtain API key via token-exchange and persist
let api_key = obtain_api_key(&opts.issuer, &opts.client_id, &tokens.id_token)
.await
@@ -358,22 +378,35 @@ fn build_authorize_url(
redirect_uri: &str,
pkce: &PkceCodes,
state: &str,
forced_chatgpt_workspace_id: Option<&str>,
) -> String {
let query = vec![
("response_type", "code"),
("client_id", client_id),
("redirect_uri", redirect_uri),
("scope", "openid profile email offline_access"),
("code_challenge", &pkce.code_challenge),
("code_challenge_method", "S256"),
("id_token_add_organizations", "true"),
("codex_cli_simplified_flow", "true"),
("state", state),
("originator", originator().value.as_str()),
let mut query = vec![
("response_type".to_string(), "code".to_string()),
("client_id".to_string(), client_id.to_string()),
("redirect_uri".to_string(), redirect_uri.to_string()),
(
"scope".to_string(),
"openid profile email offline_access".to_string(),
),
(
"code_challenge".to_string(),
pkce.code_challenge.to_string(),
),
("code_challenge_method".to_string(), "S256".to_string()),
("id_token_add_organizations".to_string(), "true".to_string()),
("codex_cli_simplified_flow".to_string(), "true".to_string()),
("state".to_string(), state.to_string()),
(
"originator".to_string(),
originator().value.as_str().to_string(),
),
];
if let Some(workspace_id) = forced_chatgpt_workspace_id {
query.push(("allowed_workspace_id".to_string(), workspace_id.to_string()));
}
let qs = query
.into_iter()
.map(|(k, v)| format!("{}={}", k, urlencoding::encode(v)))
.map(|(k, v)| format!("{k}={}", urlencoding::encode(&v)))
.collect::<Vec<_>>()
.join("&");
format!("{issuer}/oauth/authorize?{qs}")
@@ -616,6 +649,43 @@ fn jwt_auth_claims(jwt: &str) -> serde_json::Map<String, serde_json::Value> {
serde_json::Map::new()
}
/// Validates that the workspace embedded in an OAuth `id_token` matches the
/// workspace an administrator has forced via `forced_chatgpt_workspace_id`.
///
/// Returns `Ok(())` when no restriction is configured (`expected` is `None`)
/// or when the token's `chatgpt_account_id` auth claim equals `expected`.
/// Otherwise returns a human-readable message that callers surface to the
/// user and map to a `PermissionDenied` I/O error.
pub(crate) fn ensure_workspace_allowed(
    expected: Option<&str>,
    id_token: &str,
) -> Result<(), String> {
    // No forced workspace configured: any workspace is acceptable.
    let Some(expected) = expected else {
        return Ok(());
    };
    let claims = jwt_auth_claims(id_token);
    // The workspace id is carried in the `chatgpt_account_id` auth claim.
    let Some(actual) = claims.get("chatgpt_account_id").and_then(JsonValue::as_str) else {
        // Grammar fix: "a chatgpt_account_id", not "an chatgpt_account_id".
        return Err("Login is restricted to a specific workspace, but the token did not include a chatgpt_account_id claim.".to_string());
    };
    if actual == expected {
        Ok(())
    } else {
        // Deliberately echoes only the *required* id, not the user's actual one.
        Err(format!("Login is restricted to workspace id {expected}."))
    }
}
// Respond to the oauth server with an error so the code becomes unusable by anybody else.
fn login_error_response(message: &str) -> HandledRequest {
    // Best-effort Content-Type header: if construction fails we simply send
    // the body without it, matching the original behavior.
    let headers = Header::from_bytes(&b"Content-Type"[..], &b"text/plain; charset=utf-8"[..])
        .map(|header| vec![header])
        .unwrap_or_default();
    HandledRequest::ResponseAndExit {
        headers,
        body: message.as_bytes().to_vec(),
        // Propagate the restriction failure to the caller as PermissionDenied.
        result: Err(io::Error::new(
            io::ErrorKind::PermissionDenied,
            message.to_string(),
        )),
    }
}
pub(crate) async fn obtain_api_key(
issuer: &str,
client_id: &str,

View File

@@ -97,7 +97,11 @@ async fn mock_oauth_token_single(server: &MockServer, jwt: String) {
}
fn server_opts(codex_home: &tempfile::TempDir, issuer: String) -> ServerOptions {
let mut opts = ServerOptions::new(codex_home.path().to_path_buf(), "client-id".to_string());
let mut opts = ServerOptions::new(
codex_home.path().to_path_buf(),
"client-id".to_string(),
None,
);
opts.issuer = issuer;
opts.open_browser = false;
opts
@@ -139,6 +143,42 @@ async fn device_code_login_integration_succeeds() {
assert_eq!(tokens.account_id.as_deref(), Some("acct_321"));
}
#[tokio::test]
async fn device_code_login_rejects_workspace_mismatch() {
    // Device-code login must fail with PermissionDenied, and persist nothing,
    // when the issued token's workspace does not match the forced workspace id.
    skip_if_no_network!();
    let codex_home = tempdir().unwrap();
    let mock_server = MockServer::start().await;
    // Mock the full device-code flow: user-code issuance, then token polling.
    mock_usercode_success(&mock_server).await;
    mock_poll_token_two_step(&mock_server, Arc::new(AtomicUsize::new(0)), 404).await;
    // The issuer mints a token for workspace "acct_321" under the nested
    // https://api.openai.com/auth claim (where chatgpt_account_id lives).
    let jwt = make_jwt(json!({
        "https://api.openai.com/auth": {
            "chatgpt_account_id": "acct_321",
            "organization_id": "org-actual"
        }
    }));
    mock_oauth_token_single(&mock_server, jwt).await;
    let issuer = mock_server.uri();
    let mut opts = server_opts(&codex_home, issuer);
    // Force a workspace id that deliberately differs from the token's.
    opts.forced_chatgpt_workspace_id = Some("org-required".to_string());
    let err = run_device_code_login(opts)
        .await
        .expect_err("device code login should fail when workspace mismatches");
    assert_eq!(err.kind(), std::io::ErrorKind::PermissionDenied);
    // Validation failure must happen before credentials are written to disk.
    let auth_path = get_auth_file(codex_home.path());
    assert!(
        !auth_path.exists(),
        "auth.json should not be created when workspace validation fails"
    );
}
#[tokio::test]
async fn device_code_login_integration_handles_usercode_http_failure() {
skip_if_no_network!();
@@ -183,7 +223,11 @@ async fn device_code_login_integration_persists_without_api_key_on_exchange_fail
let issuer = mock_server.uri();
let mut opts = ServerOptions::new(codex_home.path().to_path_buf(), "client-id".to_string());
let mut opts = ServerOptions::new(
codex_home.path().to_path_buf(),
"client-id".to_string(),
None,
);
opts.issuer = issuer;
opts.open_browser = false;
@@ -226,7 +270,11 @@ async fn device_code_login_integration_handles_error_payload() {
let issuer = mock_server.uri();
let mut opts = ServerOptions::new(codex_home.path().to_path_buf(), "client-id".to_string());
let mut opts = ServerOptions::new(
codex_home.path().to_path_buf(),
"client-id".to_string(),
None,
);
opts.issuer = issuer;
opts.open_browser = false;

View File

@@ -14,11 +14,12 @@ use tempfile::tempdir;
// See spawn.rs for details
fn start_mock_issuer() -> (SocketAddr, thread::JoinHandle<()>) {
fn start_mock_issuer(chatgpt_account_id: &str) -> (SocketAddr, thread::JoinHandle<()>) {
// Bind to a random available port
let listener = TcpListener::bind(("127.0.0.1", 0)).unwrap();
let addr = listener.local_addr().unwrap();
let server = tiny_http::Server::from_listener(listener, None).unwrap();
let chatgpt_account_id = chatgpt_account_id.to_string();
let handle = thread::spawn(move || {
while let Ok(mut req) = server.recv() {
@@ -41,7 +42,7 @@ fn start_mock_issuer() -> (SocketAddr, thread::JoinHandle<()>) {
"email": "user@example.com",
"https://api.openai.com/auth": {
"chatgpt_plan_type": "pro",
"chatgpt_account_id": "acc-123"
"chatgpt_account_id": chatgpt_account_id,
}
});
let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
@@ -80,7 +81,8 @@ fn start_mock_issuer() -> (SocketAddr, thread::JoinHandle<()>) {
async fn end_to_end_login_flow_persists_auth_json() -> Result<()> {
skip_if_no_network!(Ok(()));
let (issuer_addr, issuer_handle) = start_mock_issuer();
let chatgpt_account_id = "12345678-0000-0000-0000-000000000000";
let (issuer_addr, issuer_handle) = start_mock_issuer(chatgpt_account_id);
let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
let tmp = tempdir()?;
@@ -113,8 +115,15 @@ async fn end_to_end_login_flow_persists_auth_json() -> Result<()> {
port: 0,
open_browser: false,
force_state: Some(state),
forced_chatgpt_workspace_id: Some(chatgpt_account_id.to_string()),
};
let server = run_login_server(opts)?;
assert!(
server
.auth_url
.contains(format!("allowed_workspace_id={chatgpt_account_id}").as_str()),
"auth URL should include forced workspace parameter"
);
let login_port = server.actual_port;
// Simulate browser callback, and follow redirect to /success
@@ -138,7 +147,7 @@ async fn end_to_end_login_flow_persists_auth_json() -> Result<()> {
assert_eq!(json["OPENAI_API_KEY"], "access-123");
assert_eq!(json["tokens"]["access_token"], "access-123");
assert_eq!(json["tokens"]["refresh_token"], "refresh-123");
assert_eq!(json["tokens"]["account_id"], "acc-123");
assert_eq!(json["tokens"]["account_id"], chatgpt_account_id);
// Stop mock issuer
drop(issuer_handle);
@@ -149,7 +158,7 @@ async fn end_to_end_login_flow_persists_auth_json() -> Result<()> {
async fn creates_missing_codex_home_dir() -> Result<()> {
skip_if_no_network!(Ok(()));
let (issuer_addr, _issuer_handle) = start_mock_issuer();
let (issuer_addr, _issuer_handle) = start_mock_issuer("org-123");
let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
let tmp = tempdir()?;
@@ -166,6 +175,7 @@ async fn creates_missing_codex_home_dir() -> Result<()> {
port: 0,
open_browser: false,
force_state: Some(state),
forced_chatgpt_workspace_id: None,
};
let server = run_login_server(opts)?;
let login_port = server.actual_port;
@@ -185,11 +195,67 @@ async fn creates_missing_codex_home_dir() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn forced_chatgpt_workspace_id_mismatch_blocks_login() -> Result<()> {
    // Browser-based login must (1) advertise the forced workspace in the
    // authorize URL, (2) reject the OAuth callback with a readable error when
    // the token's workspace mismatches, and (3) persist no credentials.
    skip_if_no_network!(Ok(()));
    // Issuer mints tokens for "org-actual", which mismatches the forced id.
    let (issuer_addr, _issuer_handle) = start_mock_issuer("org-actual");
    let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
    let tmp = tempdir()?;
    let codex_home = tmp.path().to_path_buf();
    let state = "state-mismatch".to_string();
    let opts = ServerOptions {
        codex_home: codex_home.clone(),
        client_id: codex_login::CLIENT_ID.to_string(),
        issuer,
        port: 0,
        open_browser: false,
        force_state: Some(state.clone()),
        forced_chatgpt_workspace_id: Some("org-required".to_string()),
    };
    let server = run_login_server(opts)?;
    // The forced workspace must be passed to the login page as a query param.
    assert!(
        server
            .auth_url
            .contains("allowed_workspace_id=org-required"),
        "auth URL should include forced workspace parameter"
    );
    let login_port = server.actual_port;
    // Simulate the browser hitting the local OAuth callback endpoint.
    let client = reqwest::Client::new();
    let url = format!("http://127.0.0.1:{login_port}/auth/callback?code=abc&state={state}");
    let resp = client.get(&url).send().await?;
    assert!(resp.status().is_success());
    let body = resp.text().await?;
    assert!(
        body.contains("Login is restricted to workspace id org-required"),
        "error body should mention workspace restriction"
    );
    // The server itself must report the login as failed (PermissionDenied).
    let result = server.block_until_done().await;
    assert!(
        result.is_err(),
        "login should fail due to workspace mismatch"
    );
    let err = result.unwrap_err();
    assert_eq!(err.kind(), io::ErrorKind::PermissionDenied);
    // No credentials may be written when workspace validation fails.
    let auth_path = codex_home.join("auth.json");
    assert!(
        !auth_path.exists(),
        "auth.json should not be written when the workspace mismatches"
    );
    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn cancels_previous_login_server_when_port_is_in_use() -> Result<()> {
skip_if_no_network!(Ok(()));
let (issuer_addr, _issuer_handle) = start_mock_issuer();
let (issuer_addr, _issuer_handle) = start_mock_issuer("org-123");
let issuer = format!("http://{}:{}", issuer_addr.ip(), issuer_addr.port());
let first_tmp = tempdir()?;
@@ -202,6 +268,7 @@ async fn cancels_previous_login_server_when_port_is_in_use() -> Result<()> {
port: 0,
open_browser: false,
force_state: Some("cancel_state".to_string()),
forced_chatgpt_workspace_id: None,
};
let first_server = run_login_server(first_opts)?;
@@ -220,6 +287,7 @@ async fn cancels_previous_login_server_when_port_is_in_use() -> Result<()> {
port: login_port,
open_browser: false,
force_state: Some("cancel_state_2".to_string()),
forced_chatgpt_workspace_id: None,
};
let second_server = run_login_server(second_opts)?;

View File

@@ -59,3 +59,11 @@ pub enum SandboxMode {
#[serde(rename = "danger-full-access")]
DangerFullAccess,
}
/// Login method an administrator can force via the `forced_login_method`
/// config option (typically delivered through managed configuration).
/// Serialized/displayed in lowercase (`"chatgpt"` / `"api"`).
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Display, TS)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum ForcedLoginMethod {
    /// Only "Sign in with ChatGPT" is permitted.
    Chatgpt,
    /// Only API-key login is permitted.
    Api,
}

View File

@@ -11,6 +11,7 @@ use codex_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
use codex_core::CodexAuth;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::find_conversation_path_by_id_str;
@@ -193,6 +194,12 @@ pub async fn run_main(
let config = load_config_or_exit(cli_kv_overrides.clone(), overrides.clone()).await;
#[allow(clippy::print_stderr)]
if let Err(err) = enforce_login_restrictions(&config).await {
eprintln!("{err}");
std::process::exit(1);
}
let active_profile = config.active_profile.clone();
let log_dir = codex_core::config::log_dir(&config)?;
std::fs::create_dir_all(&log_dir)?;

View File

@@ -28,6 +28,7 @@ use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;
use std::sync::RwLock;
use crate::LoginStatus;
@@ -50,6 +51,8 @@ pub(crate) enum SignInState {
ApiKeyConfigured,
}
const API_KEY_DISABLED_MESSAGE: &str = "API key login is disabled.";
#[derive(Clone, Default)]
pub(crate) struct ApiKeyInputState {
value: String,
@@ -79,25 +82,41 @@ impl KeyboardHandler for AuthModeWidget {
match key_event.code {
KeyCode::Up | KeyCode::Char('k') => {
if self.is_chatgpt_login_allowed() {
self.highlighted_mode = AuthMode::ChatGPT;
}
}
KeyCode::Down | KeyCode::Char('j') => {
if self.is_api_login_allowed() {
self.highlighted_mode = AuthMode::ApiKey;
}
}
KeyCode::Char('1') => {
if self.is_chatgpt_login_allowed() {
self.start_chatgpt_login();
}
KeyCode::Char('2') => self.start_api_key_entry(),
}
KeyCode::Char('2') => {
if self.is_api_login_allowed() {
self.start_api_key_entry();
} else {
self.disallow_api_login();
}
}
KeyCode::Enter => {
let sign_in_state = { (*self.sign_in_state.read().unwrap()).clone() };
match sign_in_state {
SignInState::PickMode => match self.highlighted_mode {
AuthMode::ChatGPT => {
AuthMode::ChatGPT if self.is_chatgpt_login_allowed() => {
self.start_chatgpt_login();
}
AuthMode::ApiKey => {
AuthMode::ApiKey if self.is_api_login_allowed() => {
self.start_api_key_entry();
}
AuthMode::ChatGPT => {}
AuthMode::ApiKey => {
self.disallow_api_login();
}
},
SignInState::ChatGptSuccessMessage => {
*self.sign_in_state.write().unwrap() = SignInState::ChatGptSuccess;
@@ -131,9 +150,26 @@ pub(crate) struct AuthModeWidget {
pub codex_home: PathBuf,
pub login_status: LoginStatus,
pub auth_manager: Arc<AuthManager>,
pub forced_chatgpt_workspace_id: Option<String>,
pub forced_login_method: Option<ForcedLoginMethod>,
}
impl AuthModeWidget {
fn is_api_login_allowed(&self) -> bool {
!matches!(self.forced_login_method, Some(ForcedLoginMethod::Chatgpt))
}
fn is_chatgpt_login_allowed(&self) -> bool {
!matches!(self.forced_login_method, Some(ForcedLoginMethod::Api))
}
fn disallow_api_login(&mut self) {
self.highlighted_mode = AuthMode::ChatGPT;
self.error = Some(API_KEY_DISABLED_MESSAGE.to_string());
*self.sign_in_state.write().unwrap() = SignInState::PickMode;
self.request_frame.schedule_frame();
}
fn render_pick_mode(&self, area: Rect, buf: &mut Buffer) {
let mut lines: Vec<Line> = vec![
Line::from(vec![
@@ -176,13 +212,19 @@ impl AuthModeWidget {
vec![line1, line2]
};
let chatgpt_description = if self.is_chatgpt_login_allowed() {
"Usage included with Plus, Pro, and Team plans"
} else {
"ChatGPT login is disabled"
};
lines.extend(create_mode_item(
0,
AuthMode::ChatGPT,
"Sign in with ChatGPT",
"Usage included with Plus, Pro, and Team plans",
chatgpt_description,
));
lines.push("".into());
if self.is_api_login_allowed() {
lines.extend(create_mode_item(
1,
AuthMode::ApiKey,
@@ -190,6 +232,14 @@ impl AuthModeWidget {
"Pay for what you use",
));
lines.push("".into());
} else {
lines.push(
" API key login is disabled by this workspace. Sign in with ChatGPT to continue."
.dim()
.into(),
);
lines.push("".into());
}
lines.push(
// AE: Following styles.md, this should probably be Cyan because it's a user input tip.
// But leaving this for a future cleanup.
@@ -428,6 +478,10 @@ impl AuthModeWidget {
}
fn start_api_key_entry(&mut self) {
if !self.is_api_login_allowed() {
self.disallow_api_login();
return;
}
self.error = None;
let prefill_from_env = read_openai_api_key_from_env();
let mut guard = self.sign_in_state.write().unwrap();
@@ -454,6 +508,10 @@ impl AuthModeWidget {
}
fn save_api_key(&mut self, api_key: String) {
if !self.is_api_login_allowed() {
self.disallow_api_login();
return;
}
match login_with_api_key(&self.codex_home, &api_key) {
Ok(()) => {
self.error = None;
@@ -491,7 +549,11 @@ impl AuthModeWidget {
}
self.error = None;
let opts = ServerOptions::new(self.codex_home.clone(), CLIENT_ID.to_string());
let opts = ServerOptions::new(
self.codex_home.clone(),
CLIENT_ID.to_string(),
self.forced_chatgpt_workspace_id.clone(),
);
match run_login_server(opts) {
Ok(child) => {
let sign_in_state = self.sign_in_state.clone();
@@ -571,3 +633,54 @@ impl WidgetRef for AuthModeWidget {
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use tempfile::TempDir;

    // Builds a widget with login forced to ChatGPT, so API-key paths must be
    // rejected. Returns the TempDir too so codex_home outlives the widget.
    fn widget_forced_chatgpt() -> (AuthModeWidget, TempDir) {
        let codex_home = TempDir::new().unwrap();
        let codex_home_path = codex_home.path().to_path_buf();
        let widget = AuthModeWidget {
            request_frame: FrameRequester::test_dummy(),
            highlighted_mode: AuthMode::ChatGPT,
            error: None,
            sign_in_state: Arc::new(RwLock::new(SignInState::PickMode)),
            codex_home: codex_home_path.clone(),
            login_status: LoginStatus::NotAuthenticated,
            auth_manager: AuthManager::shared(codex_home_path, false),
            forced_chatgpt_workspace_id: None,
            forced_login_method: Some(ForcedLoginMethod::Chatgpt),
        };
        (widget, codex_home)
    }

    #[test]
    fn api_key_flow_disabled_when_chatgpt_forced() {
        // Entering the API-key flow must be blocked: error set, state unchanged.
        let (mut widget, _tmp) = widget_forced_chatgpt();
        widget.start_api_key_entry();
        assert_eq!(widget.error.as_deref(), Some(API_KEY_DISABLED_MESSAGE));
        assert!(matches!(
            &*widget.sign_in_state.read().unwrap(),
            SignInState::PickMode
        ));
    }

    #[test]
    fn saving_api_key_is_blocked_when_chatgpt_forced() {
        // Even a direct save of an API key must be rejected and must not
        // change the login status.
        let (mut widget, _tmp) = widget_forced_chatgpt();
        widget.save_api_key("sk-test".to_string());
        assert_eq!(widget.error.as_deref(), Some(API_KEY_DISABLED_MESSAGE));
        assert!(matches!(
            &*widget.sign_in_state.read().unwrap(),
            SignInState::PickMode
        ));
        assert_eq!(widget.login_status, LoginStatus::NotAuthenticated);
    }
}

View File

@@ -12,6 +12,7 @@ use ratatui::widgets::Clear;
use ratatui::widgets::WidgetRef;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;
use crate::LoginStatus;
use crate::onboarding::auth::AuthModeWidget;
@@ -83,6 +84,8 @@ impl OnboardingScreen {
config,
} = args;
let cwd = config.cwd.clone();
let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
let forced_login_method = config.forced_login_method;
let codex_home = config.codex_home;
let mut steps: Vec<Step> = Vec::new();
if show_windows_wsl_screen {
@@ -93,14 +96,20 @@ impl OnboardingScreen {
tui.frame_requester(),
)));
if show_login_screen {
let highlighted_mode = match forced_login_method {
Some(ForcedLoginMethod::Api) => AuthMode::ApiKey,
_ => AuthMode::ChatGPT,
};
steps.push(Step::Auth(AuthModeWidget {
request_frame: tui.frame_requester(),
highlighted_mode: AuthMode::ChatGPT,
highlighted_mode,
error: None,
sign_in_state: Arc::new(RwLock::new(SignInState::PickMode)),
codex_home: codex_home.clone(),
login_status,
auth_manager,
forced_chatgpt_workspace_id,
forced_login_method,
}))
}
let is_git_repo = get_git_repo_root(&cwd).is_some();

View File

@@ -831,6 +831,22 @@ notifications = [ "agent-turn-complete", "approval-requested" ]
> [!NOTE] > `tui.notifications` is built-in and limited to the TUI session. For programmatic or cross-environment notifications—or to integrate with OS-specific notifiers—use the top-level `notify` option to run an external program that receives event JSON. The two settings are independent and can be used together.
## Forcing a login method
To force users on a given machine to use a specific login method or workspace, use [managed configurations](https://developers.openai.com/codex/security#managed-configuration) together with either or both of the following fields:
```toml
# Force the user to log in with ChatGPT or via an api key.
forced_login_method = "chatgpt" or "api"
# When logging in with ChatGPT, only the specified workspace ID will be presented during the login
# flow and the id will be validated during the oauth callback as well as every time Codex starts.
forced_chatgpt_workspace_id = "00000000-0000-0000-0000-000000000000"
```
If the active credentials don't match the config, the user will be logged out and Codex will exit.
If `forced_chatgpt_workspace_id` is set but `forced_login_method` is not set, API key login will still work.
## Config reference
| Key | Type / Values | Notes |
@@ -885,4 +901,6 @@ notifications = [ "agent-turn-complete", "approval-requested" ]
| `experimental_use_exec_command_tool` | boolean | Use experimental exec command tool. |
| `projects.<path>.trust_level` | string | Mark project/worktree as trusted (only `"trusted"` is recognized). |
| `tools.web_search` | boolean | Enable web search tool (alias: `web_search_request`) (default: false). |
| `forced_login_method` | `chatgpt` \| `api` | Only allow Codex to be used with ChatGPT or API keys. |
| `forced_chatgpt_workspace_id` | string (uuid) | Only allow Codex to be used with the specified ChatGPT workspace. |
| `tools.view_image` | boolean | Enable the `view_image` tool so Codex can attach local image files from the workspace (default: false). |