Add forced_chatgpt_workspace_id and forced_login_method configuration options (#5303)

This PR adds support for configs to specify a forced login method
(`chatgpt` or `api`) as well as a forced ChatGPT workspace (account) id. This lets
enterprises use [managed
configs](https://developers.openai.com/codex/security#managed-configuration)
to force all employees to use their company's workspace instead of their
own or any other.
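
For illustration, here is a minimal sketch of what the managed config might contain, parsed with a stand-in struct. The key names come from this PR; the accepted login-method strings and the workspace id are placeholders:

```rust
// Sketch only: a local struct mirroring the two new keys so the shape of the
// managed config.toml is visible. The real fields live on ConfigToml in this PR.
use serde::Deserialize;

#[derive(Deserialize)]
struct ForcedLoginSettings {
    forced_login_method: Option<String>, // expected to be "chatgpt" or "api"
    forced_chatgpt_workspace_id: Option<String>,
}

fn main() {
    let managed = r#"
        forced_login_method = "chatgpt"
        forced_chatgpt_workspace_id = "org_0123456789abcdef" # placeholder id
    "#;
    let settings: ForcedLoginSettings = toml::from_str(managed).expect("valid TOML");
    assert_eq!(settings.forced_login_method.as_deref(), Some("chatgpt"));
    assert_eq!(
        settings.forced_chatgpt_workspace_id.as_deref(),
        Some("org_0123456789abcdef")
    );
}
```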

When a workspace id is set, a query param is added to the login flow
that auto-selects the given workspace or errors if the user isn't a
member of it.
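
A hedged sketch of that wiring using the `url` crate; the real query parameter name isn't shown in this excerpt, so `allowed_workspace_id` below is a placeholder:

```rust
// Sketch only: append the forced workspace to the authorize URL when present.
// `allowed_workspace_id` is a made-up parameter name for illustration.
use url::Url;

fn authorize_url(base: &str, forced_workspace: Option<&str>) -> Result<Url, url::ParseError> {
    let mut url = Url::parse(base)?;
    if let Some(workspace) = forced_workspace {
        // Server side, this auto-selects the workspace or rejects the login
        // if the user isn't a member of it.
        url.query_pairs_mut().append_pair("allowed_workspace_id", workspace);
    }
    Ok(url)
}
```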

This PR is large, but most of it is tests, wiring, and required
formatting changes.

API login with ChatGPT forced
<img width="1592" height="116" alt="CleanShot 2025-10-19 at 22 40 04"
src="https://github.com/user-attachments/assets/560c6bb4-a20a-4a37-95af-93df39d057dd"
/>

ChatGPT login with API forced
<img width="1018" height="100" alt="CleanShot 2025-10-19 at 22 40 29"
src="https://github.com/user-attachments/assets/d010bbbb-9c8d-4227-9eda-e55bf043b4af"
/>

Onboarding with API forced
<img width="892" height="460" alt="CleanShot 2025-10-19 at 22 41 02"
src="https://github.com/user-attachments/assets/cc0ed45c-b257-4d62-a32e-6ca7514b5edd"
/>

Onboarding with ChatGPT forced
<img width="1154" height="426" alt="CleanShot 2025-10-19 at 22 41 27"
src="https://github.com/user-attachments/assets/41c41417-dc68-4bb4-b3e7-3b7769f7e6a1"
/>

Logging in with the wrong workspace
<img width="2222" height="84" alt="CleanShot 2025-10-19 at 22 42 31"
src="https://github.com/user-attachments/assets/0ff4222c-f626-4dd3-b035-0b7fe998a046"
/>
Gabriel Peal authored 2025-10-20 08:50:54 -07:00, committed by GitHub
parent d01f91ecec, commit d87f87e25b
19 changed files with 920 additions and 66 deletions

View File

@@ -2,6 +2,8 @@ use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
#[cfg(test)]
use serial_test::serial;
use std::env;
use std::fs::File;
use std::fs::OpenOptions;
@@ -16,7 +18,9 @@ use std::sync::Mutex;
use std::time::Duration;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::ForcedLoginMethod;
use crate::config::Config;
use crate::token_data::PlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_id_token;
@@ -233,6 +237,74 @@ pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<(
write_auth_json(&get_auth_file(codex_home), &auth_dot_json)
}
pub async fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
let Some(auth) = load_auth(&config.codex_home, true)? else {
return Ok(());
};
if let Some(required_method) = config.forced_login_method {
let method_violation = match (required_method, auth.mode) {
(ForcedLoginMethod::Api, AuthMode::ApiKey) => None,
(ForcedLoginMethod::Chatgpt, AuthMode::ChatGPT) => None,
(ForcedLoginMethod::Api, AuthMode::ChatGPT) => Some(
"API key login is required, but ChatGPT is currently being used. Logging out."
.to_string(),
),
(ForcedLoginMethod::Chatgpt, AuthMode::ApiKey) => Some(
"ChatGPT login is required, but an API key is currently being used. Logging out."
.to_string(),
),
};
if let Some(message) = method_violation {
return logout_with_message(&config.codex_home, message);
}
}
if let Some(expected_account_id) = config.forced_chatgpt_workspace_id.as_deref() {
if auth.mode != AuthMode::ChatGPT {
return Ok(());
}
let token_data = match auth.get_token_data().await {
Ok(data) => data,
Err(err) => {
return logout_with_message(
&config.codex_home,
format!(
"Failed to load ChatGPT credentials while enforcing workspace restrictions: {err}. Logging out."
),
);
}
};
// workspace is the external identifier for account id.
let chatgpt_account_id = token_data.id_token.chatgpt_account_id.as_deref();
if chatgpt_account_id != Some(expected_account_id) {
let message = match chatgpt_account_id {
Some(actual) => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials belong to {actual}. Logging out."
),
None => format!(
"Login is restricted to workspace {expected_account_id}, but current credentials lack a workspace identifier. Logging out."
),
};
return logout_with_message(&config.codex_home, message);
}
}
Ok(())
}
fn logout_with_message(codex_home: &Path, message: String) -> std::io::Result<()> {
match logout(codex_home) {
Ok(_) => Err(std::io::Error::other(message)),
Err(err) => Err(std::io::Error::other(format!(
"{message}. Failed to remove auth.json: {err}"
))),
}
}
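// Usage sketch (not part of this diff): callers run this once at startup,
// after the Config is loaded and before any authenticated work, e.g.
//
//     if let Err(err) = enforce_login_restrictions(&config).await {
//         eprintln!("{err}");
//         std::process::exit(1);
//     }
//
// On a violation the user is logged out (auth.json removed) and the message
// above becomes the error text.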
fn load_auth(
codex_home: &Path,
enable_codex_api_key_env: bool,
@@ -249,9 +321,8 @@ fn load_auth(
let client = crate::default_client::create_client();
let auth_dot_json = match try_read_auth_json(&auth_file) {
Ok(auth) => auth,
-Err(e) => {
-return Err(e);
-}
+Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
+Err(err) => return Err(err),
};
let AuthDotJson {
@@ -403,17 +474,19 @@ struct CachedAuth {
#[cfg(test)]
mod tests {
use super::*;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use base64::Engine;
use codex_protocol::config_types::ForcedLoginMethod;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
use tempfile::tempdir;
const LAST_REFRESH: &str = "2025-08-06T20:41:36.232376Z";
#[tokio::test]
async fn roundtrip_auth_dot_json() {
let codex_home = tempdir().unwrap();
@@ -421,6 +494,7 @@ mod tests {
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: None,
},
codex_home.path(),
)
@@ -460,6 +534,13 @@ mod tests {
assert!(auth.tokens.is_none(), "tokens should be cleared");
}
#[test]
fn missing_auth_json_returns_none() {
let dir = tempdir().unwrap();
let auth = CodexAuth::from_codex_home(dir.path()).expect("call should succeed");
assert_eq!(auth, None);
}
#[tokio::test]
async fn pro_account_with_no_api_key_uses_chatgpt_auth() {
let codex_home = tempdir().unwrap();
@@ -467,6 +548,7 @@ mod tests {
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: None,
},
codex_home.path(),
)
@@ -484,6 +566,10 @@ mod tests {
let guard = auth_dot_json.lock().unwrap();
let auth_dot_json = guard.as_ref().expect("AuthDotJson should exist");
let last_refresh = auth_dot_json
.last_refresh
.expect("last_refresh should be recorded");
assert_eq!(
&AuthDotJson {
openai_api_key: None,
@@ -491,20 +577,17 @@ mod tests {
id_token: IdTokenInfo {
email: Some("user@example.com".to_string()),
chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
chatgpt_account_id: None,
raw_jwt: fake_jwt,
},
access_token: "test-access-token".to_string(),
refresh_token: "test-refresh-token".to_string(),
account_id: None,
}),
-last_refresh: Some(
-DateTime::parse_from_rfc3339(LAST_REFRESH)
-.unwrap()
-.with_timezone(&Utc)
-),
+last_refresh: Some(last_refresh),
},
auth_dot_json
)
);
}
#[tokio::test]
@@ -543,6 +626,7 @@ mod tests {
struct AuthFileParams {
openai_api_key: Option<String>,
chatgpt_plan_type: String,
chatgpt_account_id: Option<String>,
}
fn write_auth_file(params: AuthFileParams, codex_home: &Path) -> std::io::Result<String> {
@@ -557,15 +641,21 @@ mod tests {
alg: "none",
typ: "JWT",
};
let mut auth_payload = serde_json::json!({
"chatgpt_plan_type": params.chatgpt_plan_type,
"chatgpt_user_id": "user-12345",
"user_id": "user-12345",
});
if let Some(chatgpt_account_id) = params.chatgpt_account_id {
let org_value = serde_json::Value::String(chatgpt_account_id);
auth_payload["chatgpt_account_id"] = org_value;
}
let payload = serde_json::json!({
"email": "user@example.com",
"email_verified": true,
"https://api.openai.com/auth": {
"chatgpt_account_id": "bc3618e3-489d-4d49-9362-1561dc53ba53",
"chatgpt_plan_type": params.chatgpt_plan_type,
"chatgpt_user_id": "user-12345",
"user_id": "user-12345",
}
"https://api.openai.com/auth": auth_payload,
});
let b64 = |b: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(b);
let header_b64 = b64(&serde_json::to_vec(&header)?);
@@ -580,12 +670,159 @@ mod tests {
"access_token": "test-access-token",
"refresh_token": "test-refresh-token"
},
"last_refresh": LAST_REFRESH,
"last_refresh": Utc::now(),
});
let auth_json = serde_json::to_string_pretty(&auth_json_data)?;
std::fs::write(auth_file, auth_json)?;
Ok(fake_jwt)
}
fn build_config(
codex_home: &Path,
forced_login_method: Option<ForcedLoginMethod>,
forced_chatgpt_workspace_id: Option<String>,
) -> Config {
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.to_path_buf(),
)
.expect("config should load");
config.forced_login_method = forced_login_method;
config.forced_chatgpt_workspace_id = forced_chatgpt_workspace_id;
config
}
/// Use sparingly.
/// TODO (gpeal): replace this with an injectable env var provider.
#[cfg(test)]
struct EnvVarGuard {
key: &'static str,
original: Option<std::ffi::OsString>,
}
#[cfg(test)]
impl EnvVarGuard {
fn set(key: &'static str, value: &str) -> Self {
let original = env::var_os(key);
unsafe {
env::set_var(key, value);
}
Self { key, original }
}
}
#[cfg(test)]
impl Drop for EnvVarGuard {
fn drop(&mut self) {
unsafe {
match &self.original {
Some(value) => env::set_var(self.key, value),
None => env::remove_var(self.key),
}
}
}
}
#[tokio::test]
async fn enforce_login_restrictions_logs_out_for_method_mismatch() {
let codex_home = tempdir().unwrap();
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("expected method mismatch to error");
assert!(err.to_string().contains("ChatGPT login is required"));
assert!(
!codex_home.path().join("auth.json").exists(),
"auth.json should be removed on mismatch"
);
}
#[tokio::test]
async fn enforce_login_restrictions_logs_out_for_workspace_mismatch() {
let codex_home = tempdir().unwrap();
let _jwt = write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: Some("org_another_org".to_string()),
},
codex_home.path(),
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("expected workspace mismatch to error");
assert!(err.to_string().contains("workspace org_mine"));
assert!(
!codex_home.path().join("auth.json").exists(),
"auth.json should be removed on mismatch"
);
}
#[tokio::test]
async fn enforce_login_restrictions_allows_matching_workspace() {
let codex_home = tempdir().unwrap();
let _jwt = write_auth_file(
AuthFileParams {
openai_api_key: None,
chatgpt_plan_type: "pro".to_string(),
chatgpt_account_id: Some("org_mine".to_string()),
},
codex_home.path(),
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
super::enforce_login_restrictions(&config)
.await
.expect("matching workspace should succeed");
assert!(
codex_home.path().join("auth.json").exists(),
"auth.json should remain when restrictions pass"
);
}
#[tokio::test]
async fn enforce_login_restrictions_allows_api_key_if_login_method_not_set_but_forced_chatgpt_workspace_id_is_set()
{
let codex_home = tempdir().unwrap();
login_with_api_key(codex_home.path(), "sk-test").expect("seed api key");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
super::enforce_login_restrictions(&config)
.await
.expect("matching workspace should succeed");
assert!(
codex_home.path().join("auth.json").exists(),
"auth.json should remain when restrictions pass"
);
}
#[tokio::test]
#[serial(codex_api_key)]
async fn enforce_login_restrictions_blocks_env_api_key_when_chatgpt_required() {
let _guard = EnvVarGuard::set(CODEX_API_KEY_ENV_VAR, "sk-env");
let codex_home = tempdir().unwrap();
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let err = super::enforce_login_restrictions(&config)
.await
.expect_err("environment API key should not satisfy forced ChatGPT login");
assert!(
err.to_string()
.contains("ChatGPT login is required, but an API key is currently being used.")
);
}
}
/// Central manager providing a single source of truth for auth.json derived

View File

@@ -36,6 +36,7 @@ use crate::protocol::SandboxPolicy;
use anyhow::Context;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
@@ -209,6 +210,12 @@ pub struct Config {
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: String,
/// When set, restricts ChatGPT login to a specific workspace identifier.
pub forced_chatgpt_workspace_id: Option<String>,
/// When set, restricts the login mechanism users may use.
pub forced_login_method: Option<ForcedLoginMethod>,
/// Include an experimental plan tool that the model can use to update its current plan and status of each step.
pub include_plan_tool: bool,
@@ -844,6 +851,14 @@ pub struct ConfigToml {
/// System instructions.
pub instructions: Option<String>,
/// When set, restricts ChatGPT login to a specific workspace identifier.
#[serde(default)]
pub forced_chatgpt_workspace_id: Option<String>,
/// When set, restricts the login mechanism users may use.
#[serde(default)]
pub forced_login_method: Option<ForcedLoginMethod>,
/// Definition for MCP servers that Codex can reach out to for tool calls.
#[serde(default)]
pub mcp_servers: HashMap<String, McpServerConfig>,
@@ -950,6 +965,8 @@ impl From<ConfigToml> for UserSavedConfig {
approval_policy: config_toml.approval_policy,
sandbox_mode: config_toml.sandbox_mode,
sandbox_settings: config_toml.sandbox_workspace_write.map(From::from),
forced_chatgpt_workspace_id: config_toml.forced_chatgpt_workspace_id,
forced_login_method: config_toml.forced_login_method,
model: config_toml.model,
model_reasoning_effort: config_toml.model_reasoning_effort,
model_reasoning_summary: config_toml.model_reasoning_summary,
@@ -1250,6 +1267,18 @@ impl Config {
let use_experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
let use_experimental_use_rmcp_client = features.enabled(Feature::RmcpClient);
let forced_chatgpt_workspace_id =
cfg.forced_chatgpt_workspace_id.as_ref().and_then(|value| {
let trimmed = value.trim();
if trimmed.is_empty() {
None
} else {
Some(trimmed.to_string())
}
});
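// Illustration (comment only, not in the diff): a managed value of "   "
// collapses to None and behaves as if the key were unset, while " org_123 "
// is normalized to Some("org_123").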
let forced_login_method = cfg.forced_login_method;
let model = model
.or(config_profile.model)
.or(cfg.model)
@@ -1358,6 +1387,8 @@ impl Config {
.chatgpt_base_url
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
forced_chatgpt_workspace_id,
forced_login_method,
include_plan_tool: include_plan_tool_flag,
include_apply_patch_tool: include_apply_patch_tool_flag,
tools_web_search_request,
@@ -2800,6 +2831,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -2867,6 +2900,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -2949,6 +2984,8 @@ model_verbosity = "high"
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,
@@ -3017,6 +3054,8 @@ model_verbosity = "high"
model_verbosity: Some(Verbosity::High),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
forced_chatgpt_workspace_id: None,
forced_login_method: None,
include_plan_tool: false,
include_apply_patch_tool: false,
tools_web_search_request: false,

View File

@@ -28,6 +28,8 @@ pub struct IdTokenInfo {
/// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
/// (Note: values may vary by backend.)
pub(crate) chatgpt_plan_type: Option<PlanType>,
/// Organization/workspace identifier associated with the token, if present.
pub chatgpt_account_id: Option<String>,
pub raw_jwt: String,
}
@@ -71,6 +73,8 @@ struct IdClaims {
struct AuthClaims {
#[serde(default)]
chatgpt_plan_type: Option<PlanType>,
#[serde(default)]
chatgpt_account_id: Option<String>,
}
#[derive(Debug, Error)]
@@ -94,11 +98,20 @@ pub fn parse_id_token(id_token: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64)?;
let claims: IdClaims = serde_json::from_slice(&payload_bytes)?;
-Ok(IdTokenInfo {
-email: claims.email,
-chatgpt_plan_type: claims.auth.and_then(|a| a.chatgpt_plan_type),
-raw_jwt: id_token.to_string(),
-})
match claims.auth {
Some(auth) => Ok(IdTokenInfo {
email: claims.email,
raw_jwt: id_token.to_string(),
chatgpt_plan_type: auth.chatgpt_plan_type,
chatgpt_account_id: auth.chatgpt_account_id,
}),
None => Ok(IdTokenInfo {
email: claims.email,
raw_jwt: id_token.to_string(),
chatgpt_plan_type: None,
chatgpt_account_id: None,
}),
}
}
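// Usage sketch (not part of this diff): the workspace check earlier in this PR
// reads the id straight off the parsed token, roughly
//
//     let info = parse_id_token(raw_jwt)?;
//     if info.chatgpt_account_id.as_deref() != Some(expected_workspace_id) {
//         // not a member of the forced workspace -> log out
//     }
//
// where `raw_jwt` and `expected_workspace_id` are placeholders.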
fn deserialize_id_token<'de, D>(deserializer: D) -> Result<IdTokenInfo, D::Error>