diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs index 8685bc54..fd530e0c 100644 --- a/codex-rs/core/src/client.rs +++ b/codex-rs/core/src/client.rs @@ -93,9 +93,7 @@ impl ModelClient { // Wrap it with the aggregation adapter so callers see *only* // the final assistant message per turn (matching the // behaviour of the Responses API). - let mut aggregated = if self.config.show_reasoning_content - && !self.config.hide_agent_reasoning - { + let mut aggregated = if !self.config.hide_agent_reasoning { crate::chat_completions::AggregatedChatStream::streaming_mode(response_stream) } else { response_stream.aggregate() diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs index 18bcf626..caebaed2 100644 --- a/codex-rs/core/src/codex.rs +++ b/codex-rs/core/src/codex.rs @@ -229,7 +229,6 @@ pub(crate) struct Session { state: Mutex, codex_linux_sandbox_exe: Option, user_shell: shell::Shell, - show_reasoning_content: bool, hide_agent_reasoning: bool, } @@ -826,7 +825,6 @@ async fn submission_loop( codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(), disable_response_storage, user_shell: default_shell, - show_reasoning_content: config.show_reasoning_content, hide_agent_reasoning: config.hide_agent_reasoning, })); @@ -1521,7 +1519,7 @@ async fn handle_response_item( sess.tx_event.send(event).await.ok(); } } - if !sess.hide_agent_reasoning && sess.show_reasoning_content && content.is_some() { + if !sess.hide_agent_reasoning && content.is_some() { let content = content.unwrap(); for item in content { let text = match item { diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs index 3277ca08..302c468b 100644 --- a/codex-rs/core/src/config.rs +++ b/codex-rs/core/src/config.rs @@ -57,10 +57,6 @@ pub struct Config { /// users are only interested in the final agent responses. 
pub hide_agent_reasoning: bool, - /// When `true`, the raw chain-of-thought text from reasoning events will be - /// displayed in the UI in addition to the reasoning summaries. - pub show_reasoning_content: bool, - /// Disable server-side response storage (sends the full conversation /// context with every request). Currently necessary for OpenAI customers /// who have opted into Zero Data Retention (ZDR). @@ -329,10 +325,6 @@ pub struct ConfigToml { /// UI/output. Defaults to `false`. pub hide_agent_reasoning: Option, - /// When set to `true`, raw chain-of-thought text from reasoning events will - /// be shown in the UI. - pub show_reasoning_content: Option, - pub model_reasoning_effort: Option, pub model_reasoning_summary: Option, @@ -499,15 +491,5 @@ impl Config { - // Resolve hide/show reasoning flags with consistent precedence: - // if hide is true, force show_reasoning_content to false. + // Whether to hide agent reasoning output in the UI (defaults to false). let hide_agent_reasoning_val = cfg.hide_agent_reasoning.unwrap_or(false); - let show_reasoning_content_val = if hide_agent_reasoning_val { false } else { cfg.show_reasoning_content.unwrap_or(false) }; - - if cfg.hide_agent_reasoning == Some(true) && cfg.show_reasoning_content == Some(true) { tracing::warn!("Ignoring show_reasoning_content because hide_agent_reasoning is true"); } let config = Self { model, @@ -539,7 +522,6 @@ impl Config { codex_linux_sandbox_exe, hide_agent_reasoning: hide_agent_reasoning_val, - show_reasoning_content: show_reasoning_content_val, model_reasoning_effort: config_profile .model_reasoning_effort .or(cfg.model_reasoning_effort) @@ -913,7 +895,6 @@ disable_response_storage = true tui: Tui::default(), codex_linux_sandbox_exe: None, hide_agent_reasoning: false, - show_reasoning_content: false, model_reasoning_effort: ReasoningEffort::High, model_reasoning_summary: ReasoningSummary::Detailed, model_supports_reasoning_summaries: false, @@ -964,7 +945,6 @@ disable_response_storage = true tui: Tui::default(), codex_linux_sandbox_exe: None,
hide_agent_reasoning: false, - show_reasoning_content: false, model_reasoning_effort: ReasoningEffort::default(), model_reasoning_summary: ReasoningSummary::default(), model_supports_reasoning_summaries: false, @@ -1030,7 +1010,6 @@ disable_response_storage = true tui: Tui::default(), codex_linux_sandbox_exe: None, hide_agent_reasoning: false, - show_reasoning_content: false, model_reasoning_effort: ReasoningEffort::default(), model_reasoning_summary: ReasoningSummary::default(), model_supports_reasoning_summaries: false,