use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::openai_tools::OpenAiTool;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::TokenUsage;
use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::config_types::Verbosity as VerbosityConfig;
use codex_protocol::models::ResponseItem;
use futures::Stream;
use serde::Serialize;
use serde_json::Value;
use std::borrow::Cow;
use std::ops::Deref;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use tokio::sync::mpsc;

/// Review thread system prompt. Edit `core/src/review_prompt.md` to customize.
pub const REVIEW_PROMPT: &str = include_str!("../review_prompt.md");

/// API request payload for a single model turn.
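/// A minimal construction sketch (illustrative only; `conversation_items` and
/// `model_family` are assumed bindings, and real callers populate `tools` via
/// the tool registry rather than by hand):
///
/// ```ignore
/// let prompt = Prompt {
///     input: conversation_items, // Vec<ResponseItem>
///     ..Default::default()
/// };
/// let instructions = prompt.get_full_instructions(&model_family);
/// ```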
#[derive(Default, Debug, Clone)]
pub struct Prompt {
    /// Conversation context input items.
    pub input: Vec<ResponseItem>,

    /// Tools available to the model, including additional tools sourced from
    /// external MCP servers.
    pub(crate) tools: Vec<OpenAiTool>,

    /// Optional override for the built-in BASE_INSTRUCTIONS.
    pub base_instructions_override: Option<String>,

    /// Optional output schema for the model's response.
    pub output_schema: Option<Value>,
}

impl Prompt {
    pub(crate) fn get_full_instructions<'a>(&'a self, model: &'a ModelFamily) -> Cow<'a, str> {
        let base = self
            .base_instructions_override
            .as_deref()
            .unwrap_or(model.base_instructions.deref());
        // When there are no custom instructions, add apply_patch_tool_instructions if:
        // - the model needs special instructions (4.1)
        // AND
        // - there is no apply_patch tool present
        let is_apply_patch_tool_present = self.tools.iter().any(|tool| match tool {
            OpenAiTool::Function(f) => f.name == "apply_patch",
            OpenAiTool::Freeform(f) => f.name == "apply_patch",
            _ => false,
        });
        if self.base_instructions_override.is_none()
            && model.needs_special_apply_patch_instructions
            && !is_apply_patch_tool_present
        {
            Cow::Owned(format!("{base}\n{APPLY_PATCH_TOOL_INSTRUCTIONS}"))
        } else {
            Cow::Borrowed(base)
        }
    }

    pub(crate) fn get_formatted_input(&self) -> Vec<ResponseItem> {
        self.input.clone()
    }
}

#[derive(Debug)]
pub enum ResponseEvent {
    Created,
    OutputItemDone(ResponseItem),
    Completed {
        response_id: String,
        token_usage: Option<TokenUsage>,
    },
    OutputTextDelta(String),
    ReasoningSummaryDelta(String),
    ReasoningContentDelta(String),
    ReasoningSummaryPartAdded,
    WebSearchCallBegin {
        call_id: String,
    },
    RateLimits(RateLimitSnapshot),
}

#[derive(Debug, Serialize)]
pub(crate) struct Reasoning {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) effort: Option<ReasoningEffortConfig>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) summary: Option<ReasoningSummaryConfig>,
}

#[derive(Debug, Serialize, Default, Clone)]
#[serde(rename_all = "snake_case")]
pub(crate) enum TextFormatType {
    #[default]
    JsonSchema,
}

#[derive(Debug, Serialize, Default, Clone)]
pub(crate) struct TextFormat {
    pub(crate) r#type: TextFormatType,
    pub(crate) strict: bool,
    pub(crate) schema: Value,
    pub(crate) name: String,
}

/// Controls under the `text` field in the Responses API for GPT-5.
#[derive(Debug, Serialize, Default, Clone)]
pub(crate) struct TextControls {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) verbosity: Option<OpenAiVerbosity>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) format: Option<TextFormat>,
}

#[derive(Debug, Serialize, Default, Clone)]
#[serde(rename_all = "lowercase")]
pub(crate) enum OpenAiVerbosity {
    Low,
    #[default]
    Medium,
    High,
}

impl From<VerbosityConfig> for OpenAiVerbosity {
    fn from(v: VerbosityConfig) -> Self {
        match v {
            VerbosityConfig::Low => OpenAiVerbosity::Low,
            VerbosityConfig::Medium => OpenAiVerbosity::Medium,
            VerbosityConfig::High => OpenAiVerbosity::High,
        }
    }
}

/// Request object that is serialized as JSON and POST'ed when using the
/// Responses API.
#[derive(Debug, Serialize)]
pub(crate) struct ResponsesApiRequest<'a> {
    pub(crate) model: &'a str,
    pub(crate) instructions: &'a str,
    // TODO(mbolin): ResponseItem::Other should not be serialized. Currently,
    // we code defensively to avoid this case, but perhaps we should use a
    // separate enum for serialization.
    pub(crate) input: &'a Vec<ResponseItem>,
    pub(crate) tools: &'a [serde_json::Value],
    pub(crate) tool_choice: &'static str,
    pub(crate) parallel_tool_calls: bool,
    pub(crate) reasoning: Option<Reasoning>,
    pub(crate) store: bool,
    pub(crate) stream: bool,
    pub(crate) include: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) prompt_cache_key: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) text: Option<TextControls>,
}

pub(crate) fn create_reasoning_param_for_request(
    model_family: &ModelFamily,
    effort: Option<ReasoningEffortConfig>,
    summary: ReasoningSummaryConfig,
) -> Option<Reasoning> {
    if !model_family.supports_reasoning_summaries {
        return None;
    }

    Some(Reasoning {
        effort,
        summary: Some(summary),
    })
}

pub(crate) fn create_text_param_for_request(
    verbosity: Option<VerbosityConfig>,
    output_schema: &Option<Value>,
) -> Option<TextControls> {
    if verbosity.is_none() && output_schema.is_none() {
        return None;
    }

    Some(TextControls {
        verbosity: verbosity.map(std::convert::Into::into),
        format: output_schema.as_ref().map(|schema| TextFormat {
            r#type: TextFormatType::JsonSchema,
            strict: true,
            schema: schema.clone(),
            name: "codex_output_schema".to_string(),
        }),
    })
}
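/// Wraps the receiver side of an `mpsc` channel of [`ResponseEvent`]s and
/// exposes it as a `futures::Stream`.
///
/// A consumption sketch (illustrative only; assumes a `stream: ResponseStream`
/// obtained from the model client, inside an async context):
///
/// ```ignore
/// use futures::StreamExt;
///
/// while let Some(event) = stream.next().await {
///     match event? {
///         ResponseEvent::OutputTextDelta(delta) => print!("{delta}"),
///         ResponseEvent::Completed { .. } => break,
///         _ => {}
///     }
/// }
/// ```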
pub struct ResponseStream {
    pub(crate) rx_event: mpsc::Receiver<Result<ResponseEvent>>,
}

impl Stream for ResponseStream {
    type Item = Result<ResponseEvent>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.rx_event.poll_recv(cx)
    }
}

#[cfg(test)]
mod tests {
    use crate::model_family::find_family_for_model;
    use pretty_assertions::assert_eq;

    use super::*;

    struct InstructionsTestCase {
        pub slug: &'static str,
        pub expects_apply_patch_instructions: bool,
    }

    #[test]
    fn get_full_instructions_no_user_content() {
        let prompt = Prompt {
            ..Default::default()
        };
        let test_cases = vec![
            InstructionsTestCase {
                slug: "gpt-3.5",
                expects_apply_patch_instructions: true,
            },
            InstructionsTestCase {
                slug: "gpt-4.1",
                expects_apply_patch_instructions: true,
            },
            InstructionsTestCase {
                slug: "gpt-4o",
                expects_apply_patch_instructions: true,
            },
            InstructionsTestCase {
                slug: "gpt-5",
                expects_apply_patch_instructions: true,
            },
            InstructionsTestCase {
                slug: "codex-mini-latest",
                expects_apply_patch_instructions: true,
            },
            InstructionsTestCase {
                slug: "gpt-oss:120b",
                expects_apply_patch_instructions: false,
            },
            InstructionsTestCase {
                slug: "gpt-5-codex",
                expects_apply_patch_instructions: false,
            },
        ];
        for test_case in test_cases {
            let model_family = find_family_for_model(test_case.slug).expect("known model slug");
            let expected = if test_case.expects_apply_patch_instructions {
                format!(
                    "{}\n{}",
                    model_family.clone().base_instructions,
                    APPLY_PATCH_TOOL_INSTRUCTIONS
                )
            } else {
                model_family.clone().base_instructions
            };
            let full = prompt.get_full_instructions(&model_family);
            assert_eq!(full, expected);
        }
    }

    #[test]
    fn serializes_text_verbosity_when_set() {
        let input: Vec<ResponseItem> = vec![];
        let tools: Vec<serde_json::Value> = vec![];
        let req = ResponsesApiRequest {
            model: "gpt-5",
            instructions: "i",
            input: &input,
            tools: &tools,
            tool_choice: "auto",
            parallel_tool_calls: false,
            reasoning: None,
            store: false,
            stream: true,
            include: vec![],
            prompt_cache_key: None,
            text: Some(TextControls {
                verbosity: Some(OpenAiVerbosity::Low),
                format: None,
            }),
        };
        let v = serde_json::to_value(&req).expect("json");
        assert_eq!(
            v.get("text")
                .and_then(|t| t.get("verbosity"))
                .and_then(|s| s.as_str()),
            Some("low")
        );
    }

    #[test]
    fn serializes_text_schema_with_strict_format() {
        let input: Vec<ResponseItem> = vec![];
        let tools: Vec<serde_json::Value> = vec![];
        let schema = serde_json::json!({
            "type": "object",
            "properties": {
                "answer": {"type": "string"}
            },
            "required": ["answer"],
        });
        let text_controls =
            create_text_param_for_request(None, &Some(schema.clone())).expect("text controls");
        let req = ResponsesApiRequest {
            model: "gpt-5",
            instructions: "i",
            input: &input,
            tools: &tools,
            tool_choice: "auto",
            parallel_tool_calls: false,
            reasoning: None,
            store: false,
            stream: true,
            include: vec![],
            prompt_cache_key: None,
            text: Some(text_controls),
        };
        let v = serde_json::to_value(&req).expect("json");
        let text = v.get("text").expect("text field");
        assert!(text.get("verbosity").is_none());
        let format = text.get("format").expect("format field");
        assert_eq!(
            format.get("name"),
            Some(&serde_json::Value::String("codex_output_schema".into()))
        );
        assert_eq!(
            format.get("type"),
            Some(&serde_json::Value::String("json_schema".into()))
        );
        assert_eq!(format.get("strict"), Some(&serde_json::Value::Bool(true)));
        assert_eq!(format.get("schema"), Some(&schema));
    }

    #[test]
    fn omits_text_when_not_set() {
        let input: Vec<ResponseItem> = vec![];
        let tools: Vec<serde_json::Value> = vec![];
        let req = ResponsesApiRequest {
            model: "gpt-5",
            instructions: "i",
            input: &input,
            tools: &tools,
            tool_choice: "auto",
            parallel_tool_calls: false,
            reasoning: None,
            store: false,
            stream: true,
            include: vec![],
            prompt_cache_key: None,
            text: None,
        };
        let v = serde_json::to_value(&req).expect("json");
        assert!(v.get("text").is_none());
    }
}
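// A small serialization sketch, added as a supplement to the test suite above:
// `Reasoning::effort` is marked `skip_serializing_if = "Option::is_none"`, so a
// `None` effort should be omitted from the JSON body entirely.
// `ReasoningSummaryConfig::Auto` is assumed to be an available variant of the
// protocol enum.
#[cfg(test)]
mod reasoning_serialization_sketch {
    use super::*;

    #[test]
    fn omits_effort_when_none() {
        let reasoning = Reasoning {
            effort: None,
            summary: Some(ReasoningSummaryConfig::Auto),
        };
        let v = serde_json::to_value(&reasoning).expect("json");
        // `effort: None` is skipped; `summary` is present because it is `Some`.
        assert!(v.get("effort").is_none());
        assert!(v.get("summary").is_some());
    }
}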