Move models.rs to protocol (#2595)
Moving models.rs into the protocol crate so we can use its types in `Codex` operations.
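The practical effect is that the model wire types now live in the shared `codex-protocol` crate instead of a private `models` module in core, so any crate that already depends on `codex-protocol` can construct them directly. Below is a minimal sketch of the new import path; the helper function is hypothetical and only illustrates usage of the moved types (their field layout matches what the diff shows).

```rust
// Hypothetical downstream usage: build an assistant message with the wire
// types from their new home in codex_protocol::models, without depending
// on codex-core.
use codex_protocol::models::{ContentItem, ResponseItem};

fn assistant_message(text: &str) -> ResponseItem {
    ResponseItem::Message {
        id: None,
        role: "assistant".to_string(),
        content: vec![ContentItem::OutputText {
            text: text.to_string(),
        }],
    }
}
```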
codex-rs/Cargo.lock (generated)
@@ -923,13 +923,16 @@ dependencies = [
 name = "codex-protocol"
 version = "0.0.0"
 dependencies = [
+ "base64 0.22.1",
  "mcp-types",
+ "mime_guess",
  "pretty_assertions",
  "serde",
  "serde_bytes",
  "serde_json",
  "strum 0.27.2",
  "strum_macros 0.27.2",
+ "tracing",
  "ts-rs",
  "uuid",
 ]
@@ -1,13 +1,13 @@
 use crate::codex::Session;
 use crate::codex::TurnContext;
-use crate::models::FunctionCallOutputPayload;
-use crate::models::ResponseInputItem;
 use crate::protocol::FileChange;
 use crate::protocol::ReviewDecision;
 use crate::safety::SafetyCheck;
 use crate::safety::assess_patch_safety;
 use codex_apply_patch::ApplyPatchAction;
 use codex_apply_patch::ApplyPatchFileChange;
+use codex_protocol::models::FunctionCallOutputPayload;
+use codex_protocol::models::ResponseInputItem;
 use std::collections::HashMap;
 use std::path::PathBuf;

@@ -22,11 +22,11 @@ use crate::client_common::ResponseStream;
 use crate::error::CodexErr;
 use crate::error::Result;
 use crate::model_family::ModelFamily;
-use crate::models::ContentItem;
-use crate::models::ReasoningItemContent;
-use crate::models::ResponseItem;
 use crate::openai_tools::create_tools_json_for_chat_completions_api;
 use crate::util::backoff;
+use codex_protocol::models::ContentItem;
+use codex_protocol::models::ReasoningItemContent;
+use codex_protocol::models::ResponseItem;

 /// Implementation for the classic Chat Completions API.
 pub(crate) async fn stream_chat_completions(
@@ -509,16 +509,19 @@ where
             // do NOT emit yet. Forward any other item (e.g. FunctionCall) right
             // away so downstream consumers see it.

-            let is_assistant_delta = matches!(&item, crate::models::ResponseItem::Message { role, .. } if role == "assistant");
+            let is_assistant_delta = matches!(&item, codex_protocol::models::ResponseItem::Message { role, .. } if role == "assistant");

             if is_assistant_delta {
                 // Only use the final assistant message if we have not
                 // seen any deltas; otherwise, deltas already built the
                 // cumulative text and this would duplicate it.
                 if this.cumulative.is_empty()
-                    && let crate::models::ResponseItem::Message { content, .. } = &item
+                    && let codex_protocol::models::ResponseItem::Message { content, .. } =
+                        &item
                     && let Some(text) = content.iter().find_map(|c| match c {
-                        crate::models::ContentItem::OutputText { text } => Some(text),
+                        codex_protocol::models::ContentItem::OutputText { text } => {
+                            Some(text)
+                        }
                         _ => None,
                     })
                 {
@@ -542,26 +545,27 @@ where
             if !this.cumulative_reasoning.is_empty()
                 && matches!(this.mode, AggregateMode::AggregatedOnly)
             {
-                let aggregated_reasoning = crate::models::ResponseItem::Reasoning {
-                    id: String::new(),
-                    summary: Vec::new(),
-                    content: Some(vec![
-                        crate::models::ReasoningItemContent::ReasoningText {
-                            text: std::mem::take(&mut this.cumulative_reasoning),
-                        },
-                    ]),
-                    encrypted_content: None,
-                };
+                let aggregated_reasoning =
+                    codex_protocol::models::ResponseItem::Reasoning {
+                        id: String::new(),
+                        summary: Vec::new(),
+                        content: Some(vec![
+                            codex_protocol::models::ReasoningItemContent::ReasoningText {
+                                text: std::mem::take(&mut this.cumulative_reasoning),
+                            },
+                        ]),
+                        encrypted_content: None,
+                    };
                 this.pending
                     .push_back(ResponseEvent::OutputItemDone(aggregated_reasoning));
                 emitted_any = true;
             }

             if !this.cumulative.is_empty() {
-                let aggregated_message = crate::models::ResponseItem::Message {
+                let aggregated_message = codex_protocol::models::ResponseItem::Message {
                     id: None,
                     role: "assistant".to_string(),
-                    content: vec![crate::models::ContentItem::OutputText {
+                    content: vec![codex_protocol::models::ContentItem::OutputText {
                         text: std::mem::take(&mut this.cumulative),
                     }],
                 };
@@ -37,13 +37,13 @@ use crate::flags::CODEX_RS_SSE_FIXTURE;
 use crate::model_family::ModelFamily;
 use crate::model_provider_info::ModelProviderInfo;
 use crate::model_provider_info::WireApi;
-use crate::models::ResponseItem;
 use crate::openai_tools::create_tools_json_for_responses_api;
 use crate::protocol::TokenUsage;
 use crate::user_agent::get_codex_user_agent;
 use crate::util::backoff;
 use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
 use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
+use codex_protocol::models::ResponseItem;
 use std::sync::Arc;

 #[derive(Debug, Deserialize)]
@@ -1,13 +1,13 @@
 use crate::config_types::Verbosity as VerbosityConfig;
 use crate::error::Result;
 use crate::model_family::ModelFamily;
-use crate::models::ContentItem;
-use crate::models::ResponseItem;
 use crate::openai_tools::OpenAiTool;
 use crate::protocol::TokenUsage;
 use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS;
 use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
 use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
+use codex_protocol::models::ContentItem;
+use codex_protocol::models::ResponseItem;
 use futures::Stream;
 use serde::Serialize;
 use std::borrow::Cow;
@@ -56,14 +56,6 @@ use crate::exec_env::create_env;
 use crate::mcp_connection_manager::McpConnectionManager;
 use crate::mcp_tool_call::handle_mcp_tool_call;
 use crate::model_family::find_family_for_model;
-use crate::models::ContentItem;
-use crate::models::FunctionCallOutputPayload;
-use crate::models::LocalShellAction;
-use crate::models::ReasoningItemContent;
-use crate::models::ReasoningItemReasoningSummary;
-use crate::models::ResponseInputItem;
-use crate::models::ResponseItem;
-use crate::models::ShellToolCallParams;
 use crate::openai_tools::ApplyPatchToolArgs;
 use crate::openai_tools::ToolsConfig;
 use crate::openai_tools::get_openai_tools;
@@ -108,6 +100,14 @@ use crate::user_notification::UserNotification;
 use crate::util::backoff;
 use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
 use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
+use codex_protocol::models::ContentItem;
+use codex_protocol::models::FunctionCallOutputPayload;
+use codex_protocol::models::LocalShellAction;
+use codex_protocol::models::ReasoningItemContent;
+use codex_protocol::models::ReasoningItemReasoningSummary;
+use codex_protocol::models::ResponseInputItem;
+use codex_protocol::models::ResponseItem;
+use codex_protocol::models::ShellToolCallParams;

 // A convenience extension trait for acquiring mutex locks where poisoning is
 // unrecoverable and should abort the program. This avoids scattered `.unwrap()`
@@ -1,4 +1,4 @@
-use crate::models::ResponseItem;
+use codex_protocol::models::ResponseItem;

 /// Transcript of conversation history
 #[derive(Debug, Clone, Default)]
@@ -66,7 +66,7 @@ impl ConversationHistory {
         self.items.push(ResponseItem::Message {
             id: None,
             role: "assistant".to_string(),
-            content: vec![crate::models::ContentItem::OutputText {
+            content: vec![codex_protocol::models::ContentItem::OutputText {
                 text: delta.to_string(),
             }],
         });
@@ -120,11 +120,11 @@ fn is_api_message(message: &ResponseItem) -> bool {

 /// Helper to append the textual content from `src` into `dst` in place.
 fn append_text_content(
-    dst: &mut Vec<crate::models::ContentItem>,
-    src: &Vec<crate::models::ContentItem>,
+    dst: &mut Vec<codex_protocol::models::ContentItem>,
+    src: &Vec<codex_protocol::models::ContentItem>,
 ) {
     for c in src {
-        if let crate::models::ContentItem::OutputText { text } = c {
+        if let codex_protocol::models::ContentItem::OutputText { text } = c {
             append_text_delta(dst, text);
         }
     }
@@ -132,15 +132,15 @@ fn append_text_content(

 /// Append a single text delta to the last OutputText item in `content`, or
 /// push a new OutputText item if none exists.
-fn append_text_delta(content: &mut Vec<crate::models::ContentItem>, delta: &str) {
-    if let Some(crate::models::ContentItem::OutputText { text }) = content
+fn append_text_delta(content: &mut Vec<codex_protocol::models::ContentItem>, delta: &str) {
+    if let Some(codex_protocol::models::ContentItem::OutputText { text }) = content
         .iter_mut()
         .rev()
-        .find(|c| matches!(c, crate::models::ContentItem::OutputText { .. }))
+        .find(|c| matches!(c, codex_protocol::models::ContentItem::OutputText { .. }))
     {
         text.push_str(delta);
     } else {
-        content.push(crate::models::ContentItem::OutputText {
+        content.push(codex_protocol::models::ContentItem::OutputText {
             text: delta.to_string(),
         });
     }
@@ -149,7 +149,7 @@ fn append_text_delta(content: &mut Vec<crate::models::ContentItem>, delta: &str)
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::models::ContentItem;
+    use codex_protocol::models::ContentItem;

     fn assistant_msg(text: &str) -> ResponseItem {
         ResponseItem::Message {
@@ -2,12 +2,12 @@ use serde::Deserialize;
 use serde::Serialize;
 use strum_macros::Display as DeriveDisplay;

-use crate::models::ContentItem;
-use crate::models::ResponseItem;
 use crate::protocol::AskForApproval;
 use crate::protocol::SandboxPolicy;
 use crate::shell::Shell;
 use codex_protocol::config_types::SandboxMode;
+use codex_protocol::models::ContentItem;
+use codex_protocol::models::ResponseItem;
 use std::path::PathBuf;

 /// wraps environment context message in a tag for the model to parse more easily.
@@ -39,7 +39,6 @@ mod conversation_manager;
 pub use conversation_manager::ConversationManager;
 pub use conversation_manager::NewConversation;
 pub mod model_family;
-mod models;
 mod openai_model_info;
 mod openai_tools;
 pub mod plan_tool;
@@ -4,13 +4,13 @@ use std::time::Instant;
 use tracing::error;

 use crate::codex::Session;
-use crate::models::FunctionCallOutputPayload;
-use crate::models::ResponseInputItem;
 use crate::protocol::Event;
 use crate::protocol::EventMsg;
 use crate::protocol::McpInvocation;
 use crate::protocol::McpToolCallBeginEvent;
 use crate::protocol::McpToolCallEndEvent;
+use codex_protocol::models::FunctionCallOutputPayload;
+use codex_protocol::models::ResponseInputItem;

 /// Handles the specified tool call dispatches the appropriate
 /// `McpToolCallBegin` and `McpToolCallEnd` events to the `Session`.
@@ -2,13 +2,13 @@ use std::collections::BTreeMap;
 use std::sync::LazyLock;

 use crate::codex::Session;
-use crate::models::FunctionCallOutputPayload;
-use crate::models::ResponseInputItem;
 use crate::openai_tools::JsonSchema;
 use crate::openai_tools::OpenAiTool;
 use crate::openai_tools::ResponsesApiTool;
 use crate::protocol::Event;
 use crate::protocol::EventMsg;
+use codex_protocol::models::FunctionCallOutputPayload;
+use codex_protocol::models::ResponseInputItem;

 // Use the canonical plan tool types from the protocol crate to ensure
 // type-identity matches events transported via `codex_protocol`.
@@ -22,7 +22,7 @@ use uuid::Uuid;
 use crate::config::Config;
 use crate::git_info::GitInfo;
 use crate::git_info::collect_git_info;
-use crate::models::ResponseItem;
+use codex_protocol::models::ResponseItem;

 const SESSIONS_SUBDIR: &str = "sessions";

@@ -11,12 +11,15 @@ path = "src/lib.rs"
 workspace = true

 [dependencies]
+base64 = "0.22.1"
 mcp-types = { path = "../mcp-types" }
+mime_guess = "2.0.5"
 serde = { version = "1", features = ["derive"] }
 serde_bytes = "0.11"
 serde_json = "1"
 strum = "0.27.2"
 strum_macros = "0.27.2"
+tracing = "0.1.41"
 ts-rs = { version = "11", features = ["uuid-impl", "serde-json-impl"] }
 uuid = { version = "1", features = ["serde", "v4"] }

@@ -1,6 +1,7 @@
 pub mod config_types;
 pub mod mcp_protocol;
 pub mod message_history;
+pub mod models;
 pub mod parse_command;
 pub mod plan_tool;
 pub mod protocol;
@@ -204,7 +204,6 @@ impl From<Vec<InputItem>> for ResponseInputItem {
                         None
                     }
                 },
-                _ => None,
             })
             .collect::<Vec<ContentItem>>(),
         }