From de2c6a2ce77240003dc5addf92d2681758b20088 Mon Sep 17 00:00:00 2001 From: pakrym-oai Date: Wed, 13 Aug 2025 17:02:50 -0700 Subject: [PATCH] Enable reasoning for codex-prefixed models (#2275) ## Summary - enable reasoning for any model slug starting with `codex-` - provide default model info for `codex-*` slugs - test that codex models are detected and support reasoning ## Testing - `just fmt` - `just fix` *(fails: E0658 `let` expressions in this position are unstable)* - `cargo test --all-features` *(fails: E0658 `let` expressions in this position are unstable)* ------ https://chatgpt.com/codex/tasks/task_i_689d13f8705483208a6ed21c076868e1 --- codex-rs/core/src/model_family.rs | 5 +++++ codex-rs/core/src/openai_model_info.rs | 8 +++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/codex-rs/core/src/model_family.rs b/codex-rs/core/src/model_family.rs index 1245a030..fa4826d7 100644 --- a/codex-rs/core/src/model_family.rs +++ b/codex-rs/core/src/model_family.rs @@ -78,6 +78,11 @@ pub fn find_family_for_model(slug: &str) -> Option { supports_reasoning_summaries: true, uses_local_shell_tool: true, ) + } else if slug.starts_with("codex-") { + model_family!( + slug, slug, + supports_reasoning_summaries: true, + ) } else if slug.starts_with("gpt-4.1") { model_family!( slug, "gpt-4.1", diff --git a/codex-rs/core/src/openai_model_info.rs b/codex-rs/core/src/openai_model_info.rs index a072d409..66f3c626 100644 --- a/codex-rs/core/src/openai_model_info.rs +++ b/codex-rs/core/src/openai_model_info.rs @@ -15,7 +15,8 @@ pub(crate) struct ModelInfo { } pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option { - match model_family.slug.as_str() { + let slug = model_family.slug.as_str(); + match slug { // OSS models have a 128k shared token pool. // Arbitrarily splitting it: 3/4 input context, 1/4 output. // https://openai.com/index/gpt-oss-model-card/ @@ -82,6 +83,11 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option { max_output_tokens: 100_000, }), + _ if slug.starts_with("codex-") => Some(ModelInfo { + context_window: 200_000, + max_output_tokens: 100_000, + }), + _ => None, } }