Because conversations that use the Responses API can have encrypted
reasoning messages, trying to resume a conversation with a different
provider could lead to confusing "failed to decrypt" errors. (This is
reproducible by starting a conversation using ChatGPT login and resuming
it as a conversation that uses OpenAI models via Azure.)
This changes `ListConversationsParams` to take a `model_providers:
Option<Vec<String>>` and adds `model_provider` on each
`ConversationSummary` it returns so these cases can be disambiguated.
Note this ended up making changes to
`codex-rs/core/src/rollout/tests.rs` because it had a number of cases
where it expected `Some` for the value of `next_cursor`, but the list of
rollouts was complete, so according to this docstring:
bcd64c7e72/codex-rs/app-server-protocol/src/protocol.rs (L334-L337)
If there are no more items to return, then `next_cursor` should be
`None`. This PR updates that logic.
---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/5658).
* #5803
* #5793
* __->__ #5658
309 lines
11 KiB
Rust
use std::fs;
|
|
use std::path::Path;
|
|
|
|
use app_test_support::McpProcess;
|
|
use app_test_support::to_response;
|
|
use codex_app_server_protocol::JSONRPCNotification;
|
|
use codex_app_server_protocol::JSONRPCResponse;
|
|
use codex_app_server_protocol::ListConversationsParams;
|
|
use codex_app_server_protocol::ListConversationsResponse;
|
|
use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
|
|
use codex_app_server_protocol::RequestId;
|
|
use codex_app_server_protocol::ResumeConversationParams;
|
|
use codex_app_server_protocol::ResumeConversationResponse;
|
|
use codex_app_server_protocol::ServerNotification;
|
|
use codex_app_server_protocol::SessionConfiguredNotification;
|
|
use pretty_assertions::assert_eq;
|
|
use serde_json::json;
|
|
use tempfile::TempDir;
|
|
use tokio::time::timeout;
|
|
use uuid::Uuid;
|
|
|
|
// Maximum time to wait for any single message (init, response, or
// notification) from the MCP server before failing the test.
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
|
|
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
|
async fn test_list_and_resume_conversations() {
|
|
// Prepare a temporary CODEX_HOME with a few fake rollout files.
|
|
let codex_home = TempDir::new().expect("create temp dir");
|
|
create_fake_rollout(
|
|
codex_home.path(),
|
|
"2025-01-02T12-00-00",
|
|
"2025-01-02T12:00:00Z",
|
|
"Hello A",
|
|
Some("openai"),
|
|
);
|
|
create_fake_rollout(
|
|
codex_home.path(),
|
|
"2025-01-01T13-00-00",
|
|
"2025-01-01T13:00:00Z",
|
|
"Hello B",
|
|
Some("openai"),
|
|
);
|
|
create_fake_rollout(
|
|
codex_home.path(),
|
|
"2025-01-01T12-00-00",
|
|
"2025-01-01T12:00:00Z",
|
|
"Hello C",
|
|
None,
|
|
);
|
|
|
|
let mut mcp = McpProcess::new(codex_home.path())
|
|
.await
|
|
.expect("spawn mcp process");
|
|
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
|
.await
|
|
.expect("init timeout")
|
|
.expect("init failed");
|
|
|
|
// Request first page with size 2
|
|
let req_id = mcp
|
|
.send_list_conversations_request(ListConversationsParams {
|
|
page_size: Some(2),
|
|
cursor: None,
|
|
model_providers: None,
|
|
})
|
|
.await
|
|
.expect("send listConversations");
|
|
let resp: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
|
|
)
|
|
.await
|
|
.expect("listConversations timeout")
|
|
.expect("listConversations resp");
|
|
let ListConversationsResponse { items, next_cursor } =
|
|
to_response::<ListConversationsResponse>(resp).expect("deserialize response");
|
|
|
|
assert_eq!(items.len(), 2);
|
|
// Newest first; preview text should match
|
|
assert_eq!(items[0].preview, "Hello A");
|
|
assert_eq!(items[1].preview, "Hello B");
|
|
assert_eq!(items[0].model_provider, "openai");
|
|
assert_eq!(items[1].model_provider, "openai");
|
|
assert!(items[0].path.is_absolute());
|
|
assert!(next_cursor.is_some());
|
|
|
|
// Request the next page using the cursor
|
|
let req_id2 = mcp
|
|
.send_list_conversations_request(ListConversationsParams {
|
|
page_size: Some(2),
|
|
cursor: next_cursor,
|
|
model_providers: None,
|
|
})
|
|
.await
|
|
.expect("send listConversations page 2");
|
|
let resp2: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(req_id2)),
|
|
)
|
|
.await
|
|
.expect("listConversations page 2 timeout")
|
|
.expect("listConversations page 2 resp");
|
|
let ListConversationsResponse {
|
|
items: items2,
|
|
next_cursor: next2,
|
|
..
|
|
} = to_response::<ListConversationsResponse>(resp2).expect("deserialize response");
|
|
assert_eq!(items2.len(), 1);
|
|
assert_eq!(items2[0].preview, "Hello C");
|
|
assert_eq!(items2[0].model_provider, "openai");
|
|
assert_eq!(next2, None);
|
|
|
|
// Add a conversation with an explicit non-OpenAI provider for filter tests.
|
|
create_fake_rollout(
|
|
codex_home.path(),
|
|
"2025-01-01T11-30-00",
|
|
"2025-01-01T11:30:00Z",
|
|
"Hello TP",
|
|
Some("test-provider"),
|
|
);
|
|
|
|
// Filtering by model provider should return only matching sessions.
|
|
let filter_req_id = mcp
|
|
.send_list_conversations_request(ListConversationsParams {
|
|
page_size: Some(10),
|
|
cursor: None,
|
|
model_providers: Some(vec!["test-provider".to_string()]),
|
|
})
|
|
.await
|
|
.expect("send listConversations filtered");
|
|
let filter_resp: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(filter_req_id)),
|
|
)
|
|
.await
|
|
.expect("listConversations filtered timeout")
|
|
.expect("listConversations filtered resp");
|
|
let ListConversationsResponse {
|
|
items: filtered_items,
|
|
next_cursor: filtered_next,
|
|
} = to_response::<ListConversationsResponse>(filter_resp).expect("deserialize filtered");
|
|
assert_eq!(filtered_items.len(), 1);
|
|
assert_eq!(filtered_next, None);
|
|
assert_eq!(filtered_items[0].preview, "Hello TP");
|
|
assert_eq!(filtered_items[0].model_provider, "test-provider");
|
|
|
|
// Empty filter should include every session regardless of provider metadata.
|
|
let unfiltered_req_id = mcp
|
|
.send_list_conversations_request(ListConversationsParams {
|
|
page_size: Some(10),
|
|
cursor: None,
|
|
model_providers: Some(Vec::new()),
|
|
})
|
|
.await
|
|
.expect("send listConversations unfiltered");
|
|
let unfiltered_resp: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(unfiltered_req_id)),
|
|
)
|
|
.await
|
|
.expect("listConversations unfiltered timeout")
|
|
.expect("listConversations unfiltered resp");
|
|
let ListConversationsResponse {
|
|
items: unfiltered_items,
|
|
next_cursor: unfiltered_next,
|
|
} = to_response::<ListConversationsResponse>(unfiltered_resp)
|
|
.expect("deserialize unfiltered response");
|
|
assert_eq!(unfiltered_items.len(), 4);
|
|
assert!(unfiltered_next.is_none());
|
|
|
|
let empty_req_id = mcp
|
|
.send_list_conversations_request(ListConversationsParams {
|
|
page_size: Some(10),
|
|
cursor: None,
|
|
model_providers: Some(vec!["other".to_string()]),
|
|
})
|
|
.await
|
|
.expect("send listConversations filtered empty");
|
|
let empty_resp: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(empty_req_id)),
|
|
)
|
|
.await
|
|
.expect("listConversations filtered empty timeout")
|
|
.expect("listConversations filtered empty resp");
|
|
let ListConversationsResponse {
|
|
items: empty_items,
|
|
next_cursor: empty_next,
|
|
} = to_response::<ListConversationsResponse>(empty_resp).expect("deserialize filtered empty");
|
|
assert!(empty_items.is_empty());
|
|
assert!(empty_next.is_none());
|
|
|
|
// Now resume one of the sessions and expect a SessionConfigured notification and response.
|
|
let resume_req_id = mcp
|
|
.send_resume_conversation_request(ResumeConversationParams {
|
|
path: items[0].path.clone(),
|
|
overrides: Some(NewConversationParams {
|
|
model: Some("o3".to_string()),
|
|
..Default::default()
|
|
}),
|
|
})
|
|
.await
|
|
.expect("send resumeConversation");
|
|
|
|
// Expect a codex/event notification with msg.type == sessionConfigured
|
|
let notification: JSONRPCNotification = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_notification_message("sessionConfigured"),
|
|
)
|
|
.await
|
|
.expect("sessionConfigured notification timeout")
|
|
.expect("sessionConfigured notification");
|
|
let session_configured: ServerNotification = notification
|
|
.try_into()
|
|
.expect("deserialize sessionConfigured notification");
|
|
// Basic shape assertion: ensure event type is sessionConfigured
|
|
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
|
|
model,
|
|
rollout_path,
|
|
..
|
|
}) = session_configured
|
|
else {
|
|
unreachable!("expected sessionConfigured notification");
|
|
};
|
|
assert_eq!(model, "o3");
|
|
assert_eq!(items[0].path.clone(), rollout_path);
|
|
|
|
// Then the response for resumeConversation
|
|
let resume_resp: JSONRPCResponse = timeout(
|
|
DEFAULT_READ_TIMEOUT,
|
|
mcp.read_stream_until_response_message(RequestId::Integer(resume_req_id)),
|
|
)
|
|
.await
|
|
.expect("resumeConversation timeout")
|
|
.expect("resumeConversation resp");
|
|
let ResumeConversationResponse {
|
|
conversation_id, ..
|
|
} = to_response::<ResumeConversationResponse>(resume_resp)
|
|
.expect("deserialize resumeConversation response");
|
|
// conversation id should be a valid UUID
|
|
assert!(!conversation_id.to_string().is_empty());
|
|
}
|
|
|
|
fn create_fake_rollout(
|
|
codex_home: &Path,
|
|
filename_ts: &str,
|
|
meta_rfc3339: &str,
|
|
preview: &str,
|
|
model_provider: Option<&str>,
|
|
) {
|
|
let uuid = Uuid::new_v4();
|
|
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
|
|
let year = &filename_ts[0..4];
|
|
let month = &filename_ts[5..7];
|
|
let day = &filename_ts[8..10];
|
|
let dir = codex_home.join("sessions").join(year).join(month).join(day);
|
|
fs::create_dir_all(&dir).unwrap_or_else(|e| panic!("create sessions dir: {e}"));
|
|
|
|
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
|
|
let mut lines = Vec::new();
|
|
// Meta line with timestamp (flattened meta in payload for new schema)
|
|
let mut payload = json!({
|
|
"id": uuid,
|
|
"timestamp": meta_rfc3339,
|
|
"cwd": "/",
|
|
"originator": "codex",
|
|
"cli_version": "0.0.0",
|
|
"instructions": null,
|
|
});
|
|
if let Some(provider) = model_provider {
|
|
payload["model_provider"] = json!(provider);
|
|
}
|
|
lines.push(
|
|
json!({
|
|
"timestamp": meta_rfc3339,
|
|
"type": "session_meta",
|
|
"payload": payload
|
|
})
|
|
.to_string(),
|
|
);
|
|
// Minimal user message entry as a persisted response item (with envelope timestamp)
|
|
lines.push(
|
|
json!({
|
|
"timestamp": meta_rfc3339,
|
|
"type":"response_item",
|
|
"payload": {
|
|
"type":"message",
|
|
"role":"user",
|
|
"content":[{"type":"input_text","text": preview}]
|
|
}
|
|
})
|
|
.to_string(),
|
|
);
|
|
// Add a matching user message event line to satisfy filters
|
|
lines.push(
|
|
json!({
|
|
"timestamp": meta_rfc3339,
|
|
"type":"event_msg",
|
|
"payload": {
|
|
"type":"user_message",
|
|
"message": preview,
|
|
"kind": "plain"
|
|
}
|
|
})
|
|
.to_string(),
|
|
);
|
|
fs::write(file_path, lines.join("\n") + "\n")
|
|
.unwrap_or_else(|e| panic!("write rollout file: {e}"));
|
|
}
|