diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs
index ed05fb5d..0fa143fd 100644
--- a/codex-rs/core/src/client.rs
+++ b/codex-rs/core/src/client.rs
@@ -127,6 +127,15 @@ impl ModelClient {
 
         let auth_mode = auth.as_ref().map(|a| a.mode);
 
+        if self.config.model_family.family == "2025-08-06-model"
+            && auth_mode != Some(AuthMode::ChatGPT)
+        {
+            return Err(CodexErr::UnexpectedStatus(
+                StatusCode::BAD_REQUEST,
+                "2025-08-06-model is only supported with ChatGPT auth; run `codex login status` to check your auth status and `codex login` to log in with ChatGPT".to_string(),
+            ));
+        }
+
         let store = prompt.store && auth_mode != Some(AuthMode::ChatGPT);
 
         let full_instructions = prompt.get_full_instructions(&self.config.model_family);
diff --git a/codex-rs/core/src/model_family.rs b/codex-rs/core/src/model_family.rs
index 7c4a9de6..cadbceca 100644
--- a/codex-rs/core/src/model_family.rs
+++ b/codex-rs/core/src/model_family.rs
@@ -89,6 +89,11 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
         simple_model_family!(slug, "gpt-oss")
     } else if slug.starts_with("gpt-3.5") {
         simple_model_family!(slug, "gpt-3.5")
+    } else if slug.starts_with("2025-08-06-model") {
+        model_family!(
+            slug, "2025-08-06-model",
+            supports_reasoning_summaries: true,
+        )
     } else {
         None
     }
diff --git a/codex-rs/core/src/openai_model_info.rs b/codex-rs/core/src/openai_model_info.rs
index 935eb8be..0ce94267 100644
--- a/codex-rs/core/src/openai_model_info.rs
+++ b/codex-rs/core/src/openai_model_info.rs
@@ -77,6 +77,11 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
             max_output_tokens: 4_096,
         }),
 
+        "2025-08-06-model" => Some(ModelInfo {
+            context_window: 200_000,
+            max_output_tokens: 100_000,
+        }),
+
         _ => None,
     }
 }
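
For context, a minimal test sketch (not part of the patch) of what the new branches are expected to return. It assumes `find_family_for_model` and `get_model_info` are reachable from a crate-internal test module and that `ModelFamily` exposes the `family` and `supports_reasoning_summaries` fields referenced in the diff; any name or path not shown in the diff is illustrative.

```rust
// Sketch only: exercises the new "2025-08-06-model" branches added in this diff.
// Module paths assume a test module inside codex-core with crate-level visibility.
#[cfg(test)]
mod model_2025_08_06_tests {
    use crate::model_family::find_family_for_model;
    use crate::openai_model_info::get_model_info;

    #[test]
    fn resolves_2025_08_06_model() {
        // The new `else if` branch should match the slug prefix.
        let family = find_family_for_model("2025-08-06-model")
            .expect("slug should resolve to a model family");
        assert_eq!(family.family, "2025-08-06-model");
        assert!(family.supports_reasoning_summaries);

        // The new match arm should report the limits declared in the diff.
        let info = get_model_info(&family).expect("model info should be defined");
        assert_eq!(info.context_window, 200_000);
        assert_eq!(info.max_output_tokens, 100_000);
    }
}
```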