// llmx/codex-rs/core/tests/common/lib.rs
#![expect(clippy::expect_used)]
use tempfile::TempDir;
use codex_core::CodexConversation;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use regex_lite::Regex;
#[cfg(target_os = "linux")]
use assert_cmd::cargo::cargo_bin;
pub mod responses;
pub mod test_codex;
pub mod test_codex_exec;
#[track_caller]
pub fn assert_regex_match<'s>(pattern: &str, actual: &'s str) -> regex_lite::Captures<'s> {
    let regex = Regex::new(pattern).unwrap_or_else(|err| {
        panic!("failed to compile regex {pattern:?}: {err}");
    });
    regex
        .captures(actual)
        .unwrap_or_else(|| panic!("regex {pattern:?} did not match {actual:?}"))
}
/// Returns a default `Config` whose on-disk state is confined to the provided
/// temporary directory. Using a per-test directory keeps tests hermetic and
/// avoids clobbering a developer's real `~/.codex`.
pub fn load_default_config_for_test(codex_home: &TempDir) -> Config {
    Config::load_from_base_config_with_overrides(
        ConfigToml::default(),
        default_test_overrides(),
        codex_home.path().to_path_buf(),
    )
    .expect("defaults for test should always succeed")
}
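// Minimal usage sketch (added for illustration; the module and test names are
// invented): each test creates its own `TempDir` and loads a config confined
// to it, so nothing touches the real `~/.codex`.
#[cfg(test)]
mod load_default_config_for_test_example {
    use super::*;
    #[test]
    fn loads_defaults_confined_to_a_temp_codex_home() {
        let codex_home = TempDir::new().expect("create temp codex home");
        // Loading should always succeed for the default (empty) `ConfigToml`
        // combined with the per-platform test overrides.
        let _config = load_default_config_for_test(&codex_home);
    }
}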
#[cfg(target_os = "linux")]
fn default_test_overrides() -> ConfigOverrides {
    ConfigOverrides {
        codex_linux_sandbox_exe: Some(cargo_bin("codex-linux-sandbox")),
        ..ConfigOverrides::default()
    }
}
#[cfg(not(target_os = "linux"))]
fn default_test_overrides() -> ConfigOverrides {
    ConfigOverrides::default()
}
/// Builds an SSE stream body from a JSON fixture.
///
/// The fixture must contain an array of objects where each object represents a
/// single SSE event with at least a `type` field matching the `event:` value.
/// Additional fields become the JSON payload for the `data:` line. An object
/// with only a `type` field results in an event with no `data:` section. This
/// makes it trivial to extend the fixtures as OpenAI adds new event kinds or
/// fields.
pub fn load_sse_fixture(path: impl AsRef<std::path::Path>) -> String {
    let events: Vec<serde_json::Value> =
        serde_json::from_reader(std::fs::File::open(path).expect("read fixture"))
            .expect("parse JSON fixture");
    events
        .into_iter()
        .map(|e| {
            let kind = e
                .get("type")
                .and_then(|v| v.as_str())
                .expect("fixture event missing type");
            if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
                format!("event: {kind}\n\n")
            } else {
                format!("event: {kind}\ndata: {e}\n\n")
            }
        })
        .collect()
}
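// Illustrative sketch of the fixture format described above (the event names
// are made up): an object with only a `type` becomes a bare `event:` line,
// while objects with extra fields also emit a `data:` payload.
#[cfg(test)]
mod load_sse_fixture_example {
    use super::*;
    #[test]
    fn fixture_objects_become_sse_events() {
        let dir = TempDir::new().expect("create temp dir");
        let fixture_path = dir.path().join("fixture.json");
        std::fs::write(
            &fixture_path,
            r#"[{"type": "response.created", "foo": 1}, {"type": "response.completed"}]"#,
        )
        .expect("write fixture");
        let body = load_sse_fixture(&fixture_path);
        // `response.created` carries a `data:` payload because it has extra fields.
        assert!(body.starts_with("event: response.created\ndata: "));
        // `response.completed` has only `type`, so it gets no `data:` section.
        assert!(body.ends_with("event: response.completed\n\n"));
    }
}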
/// Same as [`load_sse_fixture_with_id`], but takes the fixture template as an
/// in-memory string instead of reading it from a file.
pub fn load_sse_fixture_with_id_from_str(raw: &str, id: &str) -> String {
    let replaced = raw.replace("__ID__", id);
    let events: Vec<serde_json::Value> =
        serde_json::from_str(&replaced).expect("parse JSON fixture");
    events
        .into_iter()
        .map(|e| {
            let kind = e
                .get("type")
                .and_then(|v| v.as_str())
                .expect("fixture event missing type");
            if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
                format!("event: {kind}\n\n")
            } else {
                format!("event: {kind}\ndata: {e}\n\n")
            }
        })
        .collect()
}
/// Same as [`load_sse_fixture`], but replaces the placeholder `__ID__` in the
/// fixture template with the supplied identifier before parsing. This lets a
/// single JSON template be reused by multiple tests that each need a unique
/// `response_id`.
pub fn load_sse_fixture_with_id(path: impl AsRef<std::path::Path>, id: &str) -> String {
    let raw = std::fs::read_to_string(path).expect("read fixture template");
    let replaced = raw.replace("__ID__", id);
    let events: Vec<serde_json::Value> =
        serde_json::from_str(&replaced).expect("parse JSON fixture");
    events
        .into_iter()
        .map(|e| {
            let kind = e
                .get("type")
                .and_then(|v| v.as_str())
                .expect("fixture event missing type");
            if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
                format!("event: {kind}\n\n")
            } else {
                format!("event: {kind}\ndata: {e}\n\n")
            }
        })
        .collect()
}
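// Hedged sketch of the `__ID__` substitution (the template and id are
// invented): the string-based variant is used here to avoid touching the
// filesystem, but the file-based `load_sse_fixture_with_id` behaves the same.
#[cfg(test)]
mod sse_fixture_id_substitution_example {
    use super::*;
    #[test]
    fn id_placeholder_is_replaced_before_parsing() {
        let template = r#"[{"type": "response.completed", "response": {"id": "__ID__"}}]"#;
        let body = load_sse_fixture_with_id_from_str(template, "resp-123");
        // The placeholder is stamped with the per-test id before the JSON is parsed.
        assert!(body.contains(r#""id":"resp-123""#));
        assert!(!body.contains("__ID__"));
    }
}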
/// Waits for the next event satisfying `predicate`; see
/// [`wait_for_event_with_timeout`] for the timeout behavior.
pub async fn wait_for_event<F>(
    codex: &CodexConversation,
    predicate: F,
) -> codex_core::protocol::EventMsg
where
    F: FnMut(&codex_core::protocol::EventMsg) -> bool,
{
    use tokio::time::Duration;
    wait_for_event_with_timeout(codex, predicate, Duration::from_secs(1)).await
}
/// Waits for the first event for which `matcher` returns `Some` and returns
/// the extracted value.
pub async fn wait_for_event_match<T, F>(codex: &CodexConversation, matcher: F) -> T
where
    F: Fn(&codex_core::protocol::EventMsg) -> Option<T>,
{
    let ev = wait_for_event(codex, |ev| matcher(ev).is_some()).await;
    matcher(&ev).unwrap()
}
/// Waits for the next event satisfying `predicate`, panicking if `wait_time`
/// (raised to a minimum of five seconds) elapses or `next_event` fails.
pub async fn wait_for_event_with_timeout<F>(
    codex: &CodexConversation,
    mut predicate: F,
    wait_time: tokio::time::Duration,
) -> codex_core::protocol::EventMsg
where
    F: FnMut(&codex_core::protocol::EventMsg) -> bool,
{
    use tokio::time::Duration;
    use tokio::time::timeout;
    loop {
        // Allow a bit more time to accommodate async startup work (e.g. config IO, tool discovery)
        let ev = timeout(wait_time.max(Duration::from_secs(5)), codex.next_event())
            .await
            .expect("timeout waiting for event")
            .expect("stream ended unexpectedly");
        if predicate(&ev.msg) {
            return ev.msg;
        }
    }
}
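// Hedged usage sketch for the event helpers above: given an already-spawned
// conversation (setup elided), wait until the current turn finishes.
// `EventMsg::TaskComplete` is assumed to be the terminal event variant used
// elsewhere in these tests.
pub async fn wait_for_task_complete_example(conversation: &CodexConversation) {
    use codex_core::protocol::EventMsg;
    // Blocks until the predicate matches, panicking if the timeout elapses.
    let _event = wait_for_event(conversation, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
}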
/// Returns the name of the env var that identifies the Codex sandbox in use
/// (consumed by the `skip_if_sandbox!` macro below).
pub fn sandbox_env_var() -> &'static str {
    codex_core::spawn::CODEX_SANDBOX_ENV_VAR
}
/// Returns the name of the env var set when network access is disabled inside
/// a Codex sandbox (consumed by the `skip_if_no_network!` macro below).
pub fn sandbox_network_env_var() -> &'static str {
    codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
}
/// Helpers for tests that wait for files to appear on disk, backed by
/// `notify` filesystem watchers.
pub mod fs_wait {
    use anyhow::Result;
    use anyhow::anyhow;
    use notify::RecursiveMode;
    use notify::Watcher;
    use std::path::Path;
    use std::path::PathBuf;
    use std::sync::mpsc;
    use std::sync::mpsc::RecvTimeoutError;
    use std::time::Duration;
    use std::time::Instant;
    use tokio::task;
    use walkdir::WalkDir;
    /// Resolves once `path` exists, or returns an error after `timeout`.
    pub async fn wait_for_path_exists(
        path: impl Into<PathBuf>,
        timeout: Duration,
    ) -> Result<PathBuf> {
        let path = path.into();
        task::spawn_blocking(move || wait_for_path_exists_blocking(path, timeout)).await?
    }
    /// Resolves with the first file under `root` for which `predicate` returns
    /// `true`, or returns an error after `timeout`.
    pub async fn wait_for_matching_file(
        root: impl Into<PathBuf>,
        timeout: Duration,
        predicate: impl FnMut(&Path) -> bool + Send + 'static,
    ) -> Result<PathBuf> {
        let root = root.into();
        task::spawn_blocking(move || {
            let mut predicate = predicate;
            blocking_find_matching_file(root, timeout, &mut predicate)
        })
        .await?
    }
    fn wait_for_path_exists_blocking(path: PathBuf, timeout: Duration) -> Result<PathBuf> {
        if path.exists() {
            return Ok(path);
        }
        let watch_root = nearest_existing_ancestor(&path);
        let (tx, rx) = mpsc::channel();
        let mut watcher = notify::recommended_watcher(move |res| {
            let _ = tx.send(res);
        })?;
        watcher.watch(&watch_root, RecursiveMode::Recursive)?;
        let deadline = Instant::now() + timeout;
        loop {
            if path.exists() {
                return Ok(path.clone());
            }
            let now = Instant::now();
            if now >= deadline {
                break;
            }
            let remaining = deadline.saturating_duration_since(now);
            match rx.recv_timeout(remaining) {
                Ok(Ok(_event)) => {
                    if path.exists() {
                        return Ok(path.clone());
                    }
                }
                Ok(Err(err)) => return Err(err.into()),
                Err(RecvTimeoutError::Timeout) => break,
                Err(RecvTimeoutError::Disconnected) => break,
            }
        }
        if path.exists() {
            Ok(path)
        } else {
            Err(anyhow!("timed out waiting for {path:?}"))
        }
    }
    fn blocking_find_matching_file(
        root: PathBuf,
        timeout: Duration,
        predicate: &mut impl FnMut(&Path) -> bool,
    ) -> Result<PathBuf> {
        let root = wait_for_path_exists_blocking(root, timeout)?;
        if let Some(found) = scan_for_match(&root, predicate) {
            return Ok(found);
        }
        let (tx, rx) = mpsc::channel();
        let mut watcher = notify::recommended_watcher(move |res| {
            let _ = tx.send(res);
        })?;
        watcher.watch(&root, RecursiveMode::Recursive)?;
        let deadline = Instant::now() + timeout;
        while Instant::now() < deadline {
            let remaining = deadline.saturating_duration_since(Instant::now());
            match rx.recv_timeout(remaining) {
                Ok(Ok(_event)) => {
                    if let Some(found) = scan_for_match(&root, predicate) {
                        return Ok(found);
                    }
                }
                Ok(Err(err)) => return Err(err.into()),
                Err(RecvTimeoutError::Timeout) => break,
                Err(RecvTimeoutError::Disconnected) => break,
            }
        }
        if let Some(found) = scan_for_match(&root, predicate) {
            Ok(found)
        } else {
            Err(anyhow!("timed out waiting for matching file in {root:?}"))
        }
    }
    fn scan_for_match(root: &Path, predicate: &mut impl FnMut(&Path) -> bool) -> Option<PathBuf> {
        for entry in WalkDir::new(root).into_iter().filter_map(Result::ok) {
            let path = entry.path();
            if !entry.file_type().is_file() {
                continue;
            }
            if predicate(path) {
                return Some(path.to_path_buf());
            }
        }
        None
    }
    fn nearest_existing_ancestor(path: &Path) -> PathBuf {
        let mut current = path;
        loop {
            if current.exists() {
                return current.to_path_buf();
            }
            match current.parent() {
                Some(parent) => current = parent,
                None => return PathBuf::from("."),
            }
        }
    }
}
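// Minimal usage sketch for `fs_wait` (added for illustration; the marker file
// name and timeout are arbitrary): await a file that some other task or child
// process is expected to create under the test's temp directory.
pub async fn fs_wait_example(codex_home: &TempDir) -> anyhow::Result<std::path::PathBuf> {
    let marker = codex_home.path().join("marker.txt");
    // Resolves as soon as the watcher (or a final existence check) sees the file.
    fs_wait::wait_for_path_exists(marker, std::time::Duration::from_secs(5)).await
}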
/// Skips the current test (returning early, optionally with a value) when the
/// env var named by `sandbox_env_var()` is set to `"seatbelt"`.
#[macro_export]
macro_rules! skip_if_sandbox {
    () => {{
        if ::std::env::var($crate::sandbox_env_var())
            == ::core::result::Result::Ok("seatbelt".to_string())
        {
            eprintln!(
                "{} is set to 'seatbelt', skipping test.",
                $crate::sandbox_env_var()
            );
            return;
        }
    }};
    ($return_value:expr $(,)?) => {{
        if ::std::env::var($crate::sandbox_env_var())
            == ::core::result::Result::Ok("seatbelt".to_string())
        {
            eprintln!(
                "{} is set to 'seatbelt', skipping test.",
                $crate::sandbox_env_var()
            );
            return $return_value;
        }
    }};
}
/// Skips the current test (returning early, optionally with a value) when the
/// env var named by `sandbox_network_env_var()` is set, i.e. when network
/// access is disabled inside a Codex sandbox.
#[macro_export]
macro_rules! skip_if_no_network {
    () => {{
        if ::std::env::var($crate::sandbox_network_env_var()).is_ok() {
            println!(
                "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
            );
            return;
        }
    }};
    ($return_value:expr $(,)?) => {{
        if ::std::env::var($crate::sandbox_network_env_var()).is_ok() {
            println!(
                "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
            );
            return $return_value;
        }
    }};
}
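// Illustrative usage of the skip macros above (the test body is a placeholder):
// a test returns early, with a log line, when it runs under the seatbelt
// sandbox or when sandbox networking is disabled.
#[cfg(test)]
mod skip_macro_example {
    #[test]
    fn respects_sandbox_restrictions() {
        crate::skip_if_sandbox!();
        crate::skip_if_no_network!();
        // A real test would exercise sandbox- or network-dependent behavior here.
    }
}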