diff --git a/codex-rs/core/src/model_family.rs b/codex-rs/core/src/model_family.rs
index 1245a030..fa4826d7 100644
--- a/codex-rs/core/src/model_family.rs
+++ b/codex-rs/core/src/model_family.rs
@@ -78,6 +78,11 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
             supports_reasoning_summaries: true,
             uses_local_shell_tool: true,
         )
+    } else if slug.starts_with("codex-") {
+        model_family!(
+            slug, slug,
+            supports_reasoning_summaries: true,
+        )
     } else if slug.starts_with("gpt-4.1") {
         model_family!(
             slug, "gpt-4.1",
diff --git a/codex-rs/core/src/openai_model_info.rs b/codex-rs/core/src/openai_model_info.rs
index a072d409..66f3c626 100644
--- a/codex-rs/core/src/openai_model_info.rs
+++ b/codex-rs/core/src/openai_model_info.rs
@@ -15,7 +15,8 @@ pub(crate) struct ModelInfo {
 }
 
 pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
-    match model_family.slug.as_str() {
+    let slug = model_family.slug.as_str();
+    match slug {
         // OSS models have a 128k shared token pool.
         // Arbitrarily splitting it: 3/4 input context, 1/4 output.
         // https://openai.com/index/gpt-oss-model-card/
@@ -82,6 +83,11 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
             max_output_tokens: 100_000,
         }),
 
+        _ if slug.starts_with("codex-") => Some(ModelInfo {
+            context_window: 200_000,
+            max_output_tokens: 100_000,
+        }),
+
        _ => None,
    }
}
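
Not part of the diff above: a minimal sketch of how the new fallback could be exercised from a crate-internal unit test, assuming the `model_family!` keys map to same-named fields on `ModelFamily` (the test name and example slug are hypothetical).

```rust
#[test]
fn codex_prefixed_slugs_get_a_family_and_model_info() {
    // Any slug starting with "codex-" should now resolve to a model family.
    // Assumes the macro's `supports_reasoning_summaries` key sets a field of
    // the same name on ModelFamily.
    let family = find_family_for_model("codex-some-future-model")
        .expect("codex- slugs should map to a model family");
    assert!(family.supports_reasoning_summaries);

    // ...and get_model_info should report the 200k/100k limits added above.
    let info = get_model_info(&family).expect("codex- slugs should have model info");
    assert_eq!(info.context_window, 200_000);
    assert_eq!(info.max_output_tokens, 100_000);
}
```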