diff --git a/codex-rs/mcp-server/src/codex_message_processor.rs b/codex-rs/mcp-server/src/codex_message_processor.rs
index 3fc4eec0..ceb47889 100644
--- a/codex-rs/mcp-server/src/codex_message_processor.rs
+++ b/codex-rs/mcp-server/src/codex_message_processor.rs
@@ -423,32 +423,41 @@ impl CodexMessageProcessor {
         // Determine whether auth is required based on the active model provider.
         // If a custom provider is configured with `requires_openai_auth == false`,
         // then no auth step is required; otherwise, default to requiring auth.
-        let requires_openai_auth = Some(self.config.model_provider.requires_openai_auth);
+        let requires_openai_auth = self.config.model_provider.requires_openai_auth;

-        let response = match self.auth_manager.auth() {
-            Some(auth) => {
-                let (reported_auth_method, token_opt) = match auth.get_token().await {
-                    Ok(token) if !token.is_empty() => {
-                        let tok = if include_token { Some(token) } else { None };
-                        (Some(auth.mode), tok)
-                    }
-                    Ok(_) => (None, None),
-                    Err(err) => {
-                        tracing::warn!("failed to get token for auth status: {err}");
-                        (None, None)
-                    }
-                };
-                codex_protocol::mcp_protocol::GetAuthStatusResponse {
-                    auth_method: reported_auth_method,
-                    auth_token: token_opt,
-                    requires_openai_auth,
-                }
-            }
-            None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
+        let response = if !requires_openai_auth {
+            codex_protocol::mcp_protocol::GetAuthStatusResponse {
                 auth_method: None,
                 auth_token: None,
-                requires_openai_auth,
-            },
+                requires_openai_auth: Some(false),
+            }
+        } else {
+            match self.auth_manager.auth() {
+                Some(auth) => {
+                    let auth_mode = auth.mode;
+                    let (reported_auth_method, token_opt) = match auth.get_token().await {
+                        Ok(token) if !token.is_empty() => {
+                            let tok = if include_token { Some(token) } else { None };
+                            (Some(auth_mode), tok)
+                        }
+                        Ok(_) => (None, None),
+                        Err(err) => {
+                            tracing::warn!("failed to get token for auth status: {err}");
+                            (None, None)
+                        }
+                    };
+                    codex_protocol::mcp_protocol::GetAuthStatusResponse {
+                        auth_method: reported_auth_method,
+                        auth_token: token_opt,
+                        requires_openai_auth: Some(true),
+                    }
+                }
+                None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
+                    auth_method: None,
+                    auth_token: None,
+                    requires_openai_auth: Some(true),
+                },
+            }
         };

         self.outgoing.send_response(request_id, response).await;
diff --git a/codex-rs/mcp-server/tests/suite/auth.rs b/codex-rs/mcp-server/tests/suite/auth.rs
index a3ccd339..6681fd75 100644
--- a/codex-rs/mcp-server/tests/suite/auth.rs
+++ b/codex-rs/mcp-server/tests/suite/auth.rs
@@ -15,11 +15,17 @@ use tokio::time::timeout;

 const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

-// Helper to create a config.toml; mirrors create_conversation.rs
-fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
+fn create_config_toml_custom_provider(
+    codex_home: &Path,
+    requires_openai_auth: bool,
+) -> std::io::Result<()> {
     let config_toml = codex_home.join("config.toml");
-    std::fs::write(
-        config_toml,
+    let requires_line = if requires_openai_auth {
+        "requires_openai_auth = true\n"
+    } else {
+        ""
+    };
+    let contents = format!(
         r#"
 model = "mock-model"
 approval_policy = "never"
 sandbox_mode = "danger-full-access"
 model_provider = "mock"
 [model_providers.mock]
 name = "mock"
@@ -33,6 +39,20 @@
 base_url = "http://127.0.0.1:0/v1"
 wire_api = "chat"
 request_max_retries = 0
 stream_max_retries = 0
+{requires_line}
+"#
+    );
+    std::fs::write(config_toml, contents)
+}
+
+fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
+    let config_toml = codex_home.join("config.toml");
+    std::fs::write(
+        config_toml,
+        r#"
+model = "mock-model"
+approval_policy = "never"
+sandbox_mode = "danger-full-access"
 "#,
     )
 }
@@ -124,6 +144,47 @@ async fn get_auth_status_with_api_key() {
     assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
 }

+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn get_auth_status_with_api_key_when_auth_not_required() {
+    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
+    create_config_toml_custom_provider(codex_home.path(), false)
+        .unwrap_or_else(|err| panic!("write config.toml: {err}"));
+
+    let mut mcp = McpProcess::new(codex_home.path())
+        .await
+        .expect("spawn mcp process");
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
+        .await
+        .expect("init timeout")
+        .expect("init failed");
+
+    login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+
+    let request_id = mcp
+        .send_get_auth_status_request(GetAuthStatusParams {
+            include_token: Some(true),
+            refresh_token: Some(false),
+        })
+        .await
+        .expect("send getAuthStatus");
+
+    let resp: JSONRPCResponse = timeout(
+        DEFAULT_READ_TIMEOUT,
+        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
+    )
+    .await
+    .expect("getAuthStatus timeout")
+    .expect("getAuthStatus response");
+    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    assert_eq!(status.auth_method, None, "expected no auth method");
+    assert_eq!(status.auth_token, None, "expected no token");
+    assert_eq!(
+        status.requires_openai_auth,
+        Some(false),
+        "requires_openai_auth should be false",
+    );
+}
+
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn get_auth_status_with_api_key_no_include_token() {
     let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
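
The new behavior keys off the provider-level `requires_openai_auth` flag in config.toml. A config along the following lines, sketched from the fields the test helper writes (the helper's exact output may differ slightly), hits the `Some(false)` branch of getAuthStatus because the custom provider never opts into OpenAI auth:

model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"
model_provider = "mock"

[model_providers.mock]
name = "mock"
base_url = "http://127.0.0.1:0/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
# add `requires_openai_auth = true` here to exercise the `Some(true)` branch instead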