use crate::config_types::Verbosity as VerbosityConfig;
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::models::ContentItem;
use crate::models::ResponseItem;
use crate::openai_tools::OpenAiTool;
use crate::protocol::TokenUsage;
use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use futures::Stream;
use serde::Serialize;
use std::borrow::Cow;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use tokio::sync::mpsc;

/// The `instructions` field in the payload sent to a model should always start
/// with this content.
const BASE_INSTRUCTIONS: &str = include_str!("../prompt.md");

/// Wraps the user instructions message in tags for the model to parse more easily.
const USER_INSTRUCTIONS_START: &str = "<user_instructions>\n\n";
const USER_INSTRUCTIONS_END: &str = "\n\n</user_instructions>";

/// API request payload for a single model turn.
#[derive(Default, Debug, Clone)]
pub struct Prompt {
    /// Conversation context input items.
    pub input: Vec<ResponseItem>,
    /// Whether to store the response on the server side
    /// (`disable_response_storage = !store`).
    pub store: bool,
    /// Tools available to the model, including additional tools sourced from
    /// external MCP servers.
    pub tools: Vec<OpenAiTool>,
    /// Optional override for the built-in `BASE_INSTRUCTIONS`.
    pub base_instructions_override: Option<String>,
}

impl Prompt {
    pub(crate) fn get_full_instructions(&self, model: &ModelFamily) -> Cow<'_, str> {
        let base = self
            .base_instructions_override
            .as_deref()
            .unwrap_or(BASE_INSTRUCTIONS);
        let mut sections: Vec<&str> = vec![base];

        // When there are no custom instructions, add the apply_patch
        // instructions if either:
        // - the model needs special instructions, or
        // - there is no apply_patch tool present
        let is_apply_patch_tool_present = self
            .tools
            .iter()
            .any(|t| matches!(t, OpenAiTool::Function(f) if f.name == "apply_patch"));
        if self.base_instructions_override.is_none()
            && (model.needs_special_apply_patch_instructions || !is_apply_patch_tool_present)
        {
            sections.push(APPLY_PATCH_TOOL_INSTRUCTIONS);
        }
        Cow::Owned(sections.join("\n"))
    }

    pub(crate) fn get_formatted_input(&self) -> Vec<ResponseItem> {
        self.input.clone()
    }

    /// Creates a formatted user instructions message from a string.
    pub(crate) fn format_user_instructions_message(ui: &str) -> ResponseItem {
        ResponseItem::Message {
            id: None,
            role: "user".to_string(),
            content: vec![ContentItem::InputText {
                text: format!("{USER_INSTRUCTIONS_START}{ui}{USER_INSTRUCTIONS_END}"),
            }],
        }
    }
}

#[derive(Debug)]
pub enum ResponseEvent {
    Created,
    OutputItemDone(ResponseItem),
    Completed {
        response_id: String,
        token_usage: Option<TokenUsage>,
    },
    OutputTextDelta(String),
    ReasoningSummaryDelta(String),
    ReasoningContentDelta(String),
    ReasoningSummaryPartAdded,
}

#[derive(Debug, Serialize)]
pub(crate) struct Reasoning {
    pub(crate) effort: ReasoningEffortConfig,
    pub(crate) summary: ReasoningSummaryConfig,
}

/// Controls under the `text` field in the Responses API for GPT-5.
#[derive(Debug, Serialize, Default, Clone, Copy)]
pub(crate) struct TextControls {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) verbosity: Option<OpenAiVerbosity>,
}

#[derive(Debug, Serialize, Default, Clone, Copy)]
#[serde(rename_all = "lowercase")]
pub(crate) enum OpenAiVerbosity {
    Low,
    #[default]
    Medium,
    High,
}

impl From<VerbosityConfig> for OpenAiVerbosity {
    fn from(v: VerbosityConfig) -> Self {
        match v {
            VerbosityConfig::Low => OpenAiVerbosity::Low,
            VerbosityConfig::Medium => OpenAiVerbosity::Medium,
            VerbosityConfig::High => OpenAiVerbosity::High,
        }
    }
}

/// Request object that is serialized as JSON and POST'ed when using the
/// Responses API.
#[derive(Debug, Serialize)]
pub(crate) struct ResponsesApiRequest<'a> {
    pub(crate) model: &'a str,
    pub(crate) instructions: &'a str,
    // TODO(mbolin): ResponseItem::Other should not be serialized. Currently,
    // we code defensively to avoid this case, but perhaps we should use a
    // separate enum for serialization.
    pub(crate) input: &'a Vec<ResponseItem>,
    pub(crate) tools: &'a [serde_json::Value],
    pub(crate) tool_choice: &'static str,
    pub(crate) parallel_tool_calls: bool,
    pub(crate) reasoning: Option<Reasoning>,
    /// true when using the Responses API.
    pub(crate) store: bool,
    pub(crate) stream: bool,
    pub(crate) include: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) prompt_cache_key: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) text: Option<TextControls>,
}

pub(crate) fn create_reasoning_param_for_request(
    model_family: &ModelFamily,
    effort: ReasoningEffortConfig,
    summary: ReasoningSummaryConfig,
) -> Option<Reasoning> {
    if model_family.supports_reasoning_summaries {
        Some(Reasoning { effort, summary })
    } else {
        None
    }
}

pub(crate) fn create_text_param_for_request(
    verbosity: Option<VerbosityConfig>,
) -> Option<TextControls> {
    verbosity.map(|v| TextControls {
        verbosity: Some(v.into()),
    })
}

pub(crate) struct ResponseStream {
    pub(crate) rx_event: mpsc::Receiver<Result<ResponseEvent>>,
}

impl Stream for ResponseStream {
    type Item = Result<ResponseEvent>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.rx_event.poll_recv(cx)
    }
}

#[cfg(test)]
mod tests {
    use crate::model_family::find_family_for_model;

    use super::*;

    #[test]
    fn get_full_instructions_no_user_content() {
        let prompt = Prompt {
            ..Default::default()
        };
        let expected = format!("{BASE_INSTRUCTIONS}\n{APPLY_PATCH_TOOL_INSTRUCTIONS}");
        let model_family = find_family_for_model("gpt-4.1").expect("known model slug");
        let full = prompt.get_full_instructions(&model_family);
        assert_eq!(full, expected);
    }

    #[test]
    fn serializes_text_verbosity_when_set() {
        let input: Vec<ResponseItem> = vec![];
        let tools: Vec<serde_json::Value> = vec![];
        let req = ResponsesApiRequest {
            model: "gpt-5",
            instructions: "i",
            input: &input,
            tools: &tools,
            tool_choice: "auto",
            parallel_tool_calls: false,
            reasoning: None,
            store: true,
            stream: true,
            include: vec![],
            prompt_cache_key: None,
            text: Some(TextControls {
                verbosity: Some(OpenAiVerbosity::Low),
            }),
        };

        let v = serde_json::to_value(&req).expect("json");
        assert_eq!(
            v.get("text")
                .and_then(|t| t.get("verbosity"))
                .and_then(|s| s.as_str()),
            Some("low")
        );
    }

    #[test]
    fn omits_text_when_not_set() {
        let input: Vec<ResponseItem> = vec![];
        let tools: Vec<serde_json::Value> = vec![];
        let req = ResponsesApiRequest {
            model: "gpt-5",
            instructions: "i",
            input: &input,
            tools: &tools,
            tool_choice: "auto",
            parallel_tool_calls: false,
            reasoning: None,
            store: true,
            stream: true,
            include: vec![],
            prompt_cache_key: None,
            text: None,
        };

        let v = serde_json::to_value(&req).expect("json");
        assert!(v.get("text").is_none());
    }
}
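
// A hedged sketch, not part of the original file: exercises
// `Prompt::format_user_instructions_message` and documents the wire format
// implied by the `USER_INSTRUCTIONS_START`/`USER_INSTRUCTIONS_END` constants
// above. The sample string "Prefer short answers." is an arbitrary placeholder.
#[cfg(test)]
mod format_user_instructions_tests {
    use super::*;

    #[test]
    fn wraps_user_text_in_user_instructions_tags() {
        let item = Prompt::format_user_instructions_message("Prefer short answers.");
        match item {
            ResponseItem::Message { role, content, .. } => {
                assert_eq!(role, "user");
                match content.as_slice() {
                    // The user text should be carried in a single InputText
                    // item, wrapped in the <user_instructions> tags.
                    [ContentItem::InputText { text }] => assert_eq!(
                        text,
                        "<user_instructions>\n\nPrefer short answers.\n\n</user_instructions>"
                    ),
                    _ => panic!("expected a single InputText content item"),
                }
            }
            _ => panic!("expected a user message"),
        }
    }
}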
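
// A hedged sketch, not part of the original file: covers the other branch of
// `get_full_instructions` that the existing test leaves untested. When
// `base_instructions_override` is set, the override is returned verbatim and
// the apply_patch instructions are never appended. Reuses the "gpt-4.1" slug
// from the test module above.
#[cfg(test)]
mod base_instructions_override_tests {
    use super::*;
    use crate::model_family::find_family_for_model;

    #[test]
    fn override_replaces_base_and_skips_apply_patch() {
        let prompt = Prompt {
            base_instructions_override: Some("custom instructions".to_string()),
            ..Default::default()
        };
        let model_family = find_family_for_model("gpt-4.1").expect("known model slug");
        // The override short-circuits the apply_patch condition, so the
        // result is exactly the override text.
        let full = prompt.get_full_instructions(&model_family);
        assert_eq!(full, "custom instructions");
    }
}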
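
// A hedged sketch, not part of the original file: checks both halves of
// `create_text_param_for_request` — `None` yields no text controls at all,
// and a configured verbosity maps onto the lowercase OpenAI wire value
// (per the `rename_all = "lowercase"` attribute on `OpenAiVerbosity`).
#[cfg(test)]
mod text_param_tests {
    use super::*;

    #[test]
    fn maps_config_verbosity_into_text_controls() {
        // No configured verbosity means the `text` field is omitted entirely.
        assert!(create_text_param_for_request(None).is_none());

        // A configured verbosity is converted via `From<VerbosityConfig>` and
        // serialized as its lowercase name.
        let controls = create_text_param_for_request(Some(VerbosityConfig::High))
            .expect("expected text controls");
        let v = serde_json::to_value(controls).expect("json");
        assert_eq!(v.get("verbosity").and_then(|s| s.as_str()), Some("high"));
    }
}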