feat: annotate conversations with model_provider for filtering (#5658)

Because conversations that use the Responses API can contain encrypted
reasoning messages, resuming a conversation with a different provider
can lead to confusing "failed to decrypt" errors. (This is reproducible
by starting a conversation using ChatGPT login and then resuming it
against a provider that serves OpenAI models via Azure.)

This change adds a `model_providers: Option<Vec<String>>` field to
`ListConversationsParams` and a `model_provider` field to each
`ConversationSummary` it returns, so these cases can be disambiguated.
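
A minimal, hypothetical client-side sketch of the filter semantics (only the
`ListConversationsParams` field names come from this change; the struct is
re-declared here so the example stands alone):

```rust
// Hypothetical, client-side sketch of the new filter semantics. The field
// names mirror `ListConversationsParams`; the struct itself is re-declared
// here only so the example is self-contained.
#[derive(Default, Debug)]
struct ListConversationsParams {
    page_size: Option<usize>,
    cursor: Option<String>,
    /// None => filter by the server's default model provider
    /// Some(vec![]) => no filtering, include all providers
    /// Some(vec![...]) => only sessions recorded with one of these providers
    model_providers: Option<Vec<String>>,
}

fn main() {
    // Only list sessions recorded against an "azure" provider.
    let azure_only = ListConversationsParams {
        page_size: Some(25),
        model_providers: Some(vec!["azure".to_string()]),
        ..Default::default()
    };

    // An empty list disables filtering and returns every provider's sessions.
    let all_providers = ListConversationsParams {
        model_providers: Some(Vec::new()),
        ..Default::default()
    };

    // Omitting the field falls back to the server's default provider.
    let default_provider_only = ListConversationsParams::default();

    println!("{azure_only:?}\n{all_providers:?}\n{default_provider_only:?}");
}
```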

Note that this also required changes to
`codex-rs/core/src/rollout/tests.rs`, which had a number of cases that
expected `Some` for the value of `next_cursor` even though the list of
rollouts was complete. According to this docstring:

bcd64c7e72/codex-rs/app-server-protocol/src/protocol.rs (L334-L337)

`next_cursor` should be `None` when there are no more items to return,
so this PR updates that logic.
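
For clarity, a simplified sketch of that cursor rule (the function name and
signature are illustrative, not the actual `traverse_directories_for_paths`
implementation):

```rust
// Simplified sketch of the pagination rule this PR enforces: only hand back a
// cursor when more matching rollouts may remain, i.e. the page filled up or
// the scan cap was hit before the listing finished.
fn next_cursor_for_page(
    items_returned: usize,
    page_size: usize,
    reached_scan_cap: bool,
    last_item_cursor: Option<String>,
) -> Option<String> {
    let more_matches_available =
        items_returned == page_size || (reached_scan_cap && items_returned > 0);
    if more_matches_available {
        last_item_cursor
    } else {
        // The listing is complete, so callers see None and stop paging.
        None
    }
}

fn main() {
    // A full page may have more behind it, so a cursor is returned.
    assert!(next_cursor_for_page(2, 2, false, Some("2025-01-01T12-00-00|id".into())).is_some());
    // A short page with no scan cap means the listing is complete.
    assert_eq!(
        next_cursor_for_page(1, 2, false, Some("2025-01-01T12-00-00|id".into())),
        None
    );
}
```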

---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/5658).
* #5803
* #5793
* __->__ #5658
Commit 5907422d65 (parent f178805252), authored by Michael Bolin on
2025-10-27 02:03:30 -07:00 and committed by GitHub.
14 changed files with 560 additions and 74 deletions

View File

@@ -327,6 +327,12 @@ pub struct ListConversationsParams {
/// Opaque pagination cursor returned by a previous call. /// Opaque pagination cursor returned by a previous call.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub cursor: Option<String>, pub cursor: Option<String>,
/// Optional model provider filter (matches against session metadata).
/// - None => filter by the server's default model provider
/// - Some([]) => no filtering, include all providers
/// - Some([...]) => only include sessions with one of the specified providers
#[serde(skip_serializing_if = "Option::is_none")]
pub model_providers: Option<Vec<String>>,
} }
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -338,6 +344,8 @@ pub struct ConversationSummary {
/// RFC3339 timestamp string for the session start, if available. /// RFC3339 timestamp string for the session start, if available.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub timestamp: Option<String>, pub timestamp: Option<String>,
/// Model provider recorded for the session (resolved when absent in metadata).
pub model_provider: String,
} }
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View File

@@ -827,19 +827,38 @@ impl CodexMessageProcessor {
request_id: RequestId, request_id: RequestId,
params: ListConversationsParams, params: ListConversationsParams,
) { ) {
let page_size = params.page_size.unwrap_or(25); let ListConversationsParams {
page_size,
cursor,
model_providers: model_provider,
} = params;
let page_size = page_size.unwrap_or(25);
// Decode the optional cursor string to a Cursor via serde (Cursor implements Deserialize from string) // Decode the optional cursor string to a Cursor via serde (Cursor implements Deserialize from string)
let cursor_obj: Option<RolloutCursor> = match params.cursor { let cursor_obj: Option<RolloutCursor> = match cursor {
Some(s) => serde_json::from_str::<RolloutCursor>(&format!("\"{s}\"")).ok(), Some(s) => serde_json::from_str::<RolloutCursor>(&format!("\"{s}\"")).ok(),
None => None, None => None,
}; };
let cursor_ref = cursor_obj.as_ref(); let cursor_ref = cursor_obj.as_ref();
let model_provider_filter = match model_provider {
Some(providers) => {
if providers.is_empty() {
None
} else {
Some(providers)
}
}
None => Some(vec![self.config.model_provider_id.clone()]),
};
let model_provider_slice = model_provider_filter.as_deref();
let fallback_provider = self.config.model_provider_id.clone();
let page = match RolloutRecorder::list_conversations( let page = match RolloutRecorder::list_conversations(
&self.config.codex_home, &self.config.codex_home,
page_size, page_size,
cursor_ref, cursor_ref,
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
model_provider_slice,
fallback_provider.as_str(),
) )
.await .await
{ {
@@ -858,7 +877,7 @@ impl CodexMessageProcessor {
let items = page let items = page
.items .items
.into_iter() .into_iter()
.filter_map(|it| extract_conversation_summary(it.path, &it.head)) .filter_map(|it| extract_conversation_summary(it.path, &it.head, &fallback_provider))
.collect(); .collect();
// Encode next_cursor as a plain string // Encode next_cursor as a plain string
@@ -1707,6 +1726,7 @@ async fn on_exec_approval_response(
fn extract_conversation_summary( fn extract_conversation_summary(
path: PathBuf, path: PathBuf,
head: &[serde_json::Value], head: &[serde_json::Value],
fallback_provider: &str,
) -> Option<ConversationSummary> { ) -> Option<ConversationSummary> {
let session_meta = match head.first() { let session_meta = match head.first() {
Some(first_line) => serde_json::from_value::<SessionMeta>(first_line.clone()).ok()?, Some(first_line) => serde_json::from_value::<SessionMeta>(first_line.clone()).ok()?,
@@ -1731,12 +1751,17 @@ fn extract_conversation_summary(
} else { } else {
Some(session_meta.timestamp.clone()) Some(session_meta.timestamp.clone())
}; };
let conversation_id = session_meta.id;
let model_provider = session_meta
.model_provider
.unwrap_or_else(|| fallback_provider.to_string());
Some(ConversationSummary { Some(ConversationSummary {
conversation_id: session_meta.id, conversation_id,
timestamp, timestamp,
path, path,
preview: preview.to_string(), preview: preview.to_string(),
model_provider,
}) })
} }
@@ -1760,7 +1785,8 @@ mod tests {
"cwd": "/", "cwd": "/",
"originator": "codex", "originator": "codex",
"cli_version": "0.0.0", "cli_version": "0.0.0",
"instructions": null "instructions": null,
"model_provider": "test-provider"
}), }),
json!({ json!({
"type": "message", "type": "message",
@@ -1780,7 +1806,8 @@ mod tests {
}), }),
]; ];
let summary = extract_conversation_summary(path.clone(), &head).expect("summary"); let summary =
extract_conversation_summary(path.clone(), &head, "test-provider").expect("summary");
assert_eq!(summary.conversation_id, conversation_id); assert_eq!(summary.conversation_id, conversation_id);
assert_eq!( assert_eq!(
@@ -1789,6 +1816,7 @@ mod tests {
); );
assert_eq!(summary.path, path); assert_eq!(summary.path, path);
assert_eq!(summary.preview, "Count to 5"); assert_eq!(summary.preview, "Count to 5");
assert_eq!(summary.model_provider, "test-provider");
Ok(()) Ok(())
} }
} }

View File

@@ -30,18 +30,21 @@ async fn test_list_and_resume_conversations() {
"2025-01-02T12-00-00", "2025-01-02T12-00-00",
"2025-01-02T12:00:00Z", "2025-01-02T12:00:00Z",
"Hello A", "Hello A",
Some("openai"),
); );
create_fake_rollout( create_fake_rollout(
codex_home.path(), codex_home.path(),
"2025-01-01T13-00-00", "2025-01-01T13-00-00",
"2025-01-01T13:00:00Z", "2025-01-01T13:00:00Z",
"Hello B", "Hello B",
Some("openai"),
); );
create_fake_rollout( create_fake_rollout(
codex_home.path(), codex_home.path(),
"2025-01-01T12-00-00", "2025-01-01T12-00-00",
"2025-01-01T12:00:00Z", "2025-01-01T12:00:00Z",
"Hello C", "Hello C",
None,
); );
let mut mcp = McpProcess::new(codex_home.path()) let mut mcp = McpProcess::new(codex_home.path())
@@ -57,6 +60,7 @@ async fn test_list_and_resume_conversations() {
.send_list_conversations_request(ListConversationsParams { .send_list_conversations_request(ListConversationsParams {
page_size: Some(2), page_size: Some(2),
cursor: None, cursor: None,
model_providers: None,
}) })
.await .await
.expect("send listConversations"); .expect("send listConversations");
@@ -74,6 +78,8 @@ async fn test_list_and_resume_conversations() {
// Newest first; preview text should match // Newest first; preview text should match
assert_eq!(items[0].preview, "Hello A"); assert_eq!(items[0].preview, "Hello A");
assert_eq!(items[1].preview, "Hello B"); assert_eq!(items[1].preview, "Hello B");
assert_eq!(items[0].model_provider, "openai");
assert_eq!(items[1].model_provider, "openai");
assert!(items[0].path.is_absolute()); assert!(items[0].path.is_absolute());
assert!(next_cursor.is_some()); assert!(next_cursor.is_some());
@@ -82,6 +88,7 @@ async fn test_list_and_resume_conversations() {
.send_list_conversations_request(ListConversationsParams { .send_list_conversations_request(ListConversationsParams {
page_size: Some(2), page_size: Some(2),
cursor: next_cursor, cursor: next_cursor,
model_providers: None,
}) })
.await .await
.expect("send listConversations page 2"); .expect("send listConversations page 2");
@@ -99,7 +106,88 @@ async fn test_list_and_resume_conversations() {
} = to_response::<ListConversationsResponse>(resp2).expect("deserialize response"); } = to_response::<ListConversationsResponse>(resp2).expect("deserialize response");
assert_eq!(items2.len(), 1); assert_eq!(items2.len(), 1);
assert_eq!(items2[0].preview, "Hello C"); assert_eq!(items2[0].preview, "Hello C");
assert!(next2.is_some()); assert_eq!(items2[0].model_provider, "openai");
assert_eq!(next2, None);
// Add a conversation with an explicit non-OpenAI provider for filter tests.
create_fake_rollout(
codex_home.path(),
"2025-01-01T11-30-00",
"2025-01-01T11:30:00Z",
"Hello TP",
Some("test-provider"),
);
// Filtering by model provider should return only matching sessions.
let filter_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(vec!["test-provider".to_string()]),
})
.await
.expect("send listConversations filtered");
let filter_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(filter_req_id)),
)
.await
.expect("listConversations filtered timeout")
.expect("listConversations filtered resp");
let ListConversationsResponse {
items: filtered_items,
next_cursor: filtered_next,
} = to_response::<ListConversationsResponse>(filter_resp).expect("deserialize filtered");
assert_eq!(filtered_items.len(), 1);
assert_eq!(filtered_next, None);
assert_eq!(filtered_items[0].preview, "Hello TP");
assert_eq!(filtered_items[0].model_provider, "test-provider");
// Empty filter should include every session regardless of provider metadata.
let unfiltered_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(Vec::new()),
})
.await
.expect("send listConversations unfiltered");
let unfiltered_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(unfiltered_req_id)),
)
.await
.expect("listConversations unfiltered timeout")
.expect("listConversations unfiltered resp");
let ListConversationsResponse {
items: unfiltered_items,
next_cursor: unfiltered_next,
} = to_response::<ListConversationsResponse>(unfiltered_resp)
.expect("deserialize unfiltered response");
assert_eq!(unfiltered_items.len(), 4);
assert!(unfiltered_next.is_none());
let empty_req_id = mcp
.send_list_conversations_request(ListConversationsParams {
page_size: Some(10),
cursor: None,
model_providers: Some(vec!["other".to_string()]),
})
.await
.expect("send listConversations filtered empty");
let empty_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(empty_req_id)),
)
.await
.expect("listConversations filtered empty timeout")
.expect("listConversations filtered empty resp");
let ListConversationsResponse {
items: empty_items,
next_cursor: empty_next,
} = to_response::<ListConversationsResponse>(empty_resp).expect("deserialize filtered empty");
assert!(empty_items.is_empty());
assert!(empty_next.is_none());
// Now resume one of the sessions and expect a SessionConfigured notification and response. // Now resume one of the sessions and expect a SessionConfigured notification and response.
let resume_req_id = mcp let resume_req_id = mcp
@@ -152,7 +240,13 @@ async fn test_list_and_resume_conversations() {
assert!(!conversation_id.to_string().is_empty()); assert!(!conversation_id.to_string().is_empty());
} }
fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) { fn create_fake_rollout(
codex_home: &Path,
filename_ts: &str,
meta_rfc3339: &str,
preview: &str,
model_provider: Option<&str>,
) {
let uuid = Uuid::new_v4(); let uuid = Uuid::new_v4();
// sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss) // sessions/YYYY/MM/DD/ derived from filename_ts (YYYY-MM-DDThh-mm-ss)
let year = &filename_ts[0..4]; let year = &filename_ts[0..4];
@@ -164,18 +258,22 @@ fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str,
let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl")); let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));
let mut lines = Vec::new(); let mut lines = Vec::new();
// Meta line with timestamp (flattened meta in payload for new schema) // Meta line with timestamp (flattened meta in payload for new schema)
let mut payload = json!({
"id": uuid,
"timestamp": meta_rfc3339,
"cwd": "/",
"originator": "codex",
"cli_version": "0.0.0",
"instructions": null,
});
if let Some(provider) = model_provider {
payload["model_provider"] = json!(provider);
}
lines.push( lines.push(
json!({ json!({
"timestamp": meta_rfc3339, "timestamp": meta_rfc3339,
"type": "session_meta", "type": "session_meta",
"payload": { "payload": payload
"id": uuid,
"timestamp": meta_rfc3339,
"cwd": "/",
"originator": "codex",
"cli_version": "0.0.0",
"instructions": null
}
}) })
.to_string(), .to_string(),
); );

View File

@@ -54,6 +54,7 @@ struct HeadTailSummary {
saw_session_meta: bool, saw_session_meta: bool,
saw_user_event: bool, saw_user_event: bool,
source: Option<SessionSource>, source: Option<SessionSource>,
model_provider: Option<String>,
created_at: Option<String>, created_at: Option<String>,
updated_at: Option<String>, updated_at: Option<String>,
} }
@@ -109,6 +110,8 @@ pub(crate) async fn get_conversations(
page_size: usize, page_size: usize,
cursor: Option<&Cursor>, cursor: Option<&Cursor>,
allowed_sources: &[SessionSource], allowed_sources: &[SessionSource],
model_providers: Option<&[String]>,
default_provider: &str,
) -> io::Result<ConversationsPage> { ) -> io::Result<ConversationsPage> {
let mut root = codex_home.to_path_buf(); let mut root = codex_home.to_path_buf();
root.push(SESSIONS_SUBDIR); root.push(SESSIONS_SUBDIR);
@@ -124,8 +127,17 @@ pub(crate) async fn get_conversations(
let anchor = cursor.cloned(); let anchor = cursor.cloned();
let result = let provider_matcher =
traverse_directories_for_paths(root.clone(), page_size, anchor, allowed_sources).await?; model_providers.and_then(|filters| ProviderMatcher::new(filters, default_provider));
let result = traverse_directories_for_paths(
root.clone(),
page_size,
anchor,
allowed_sources,
provider_matcher.as_ref(),
)
.await?;
Ok(result) Ok(result)
} }
@@ -145,6 +157,7 @@ async fn traverse_directories_for_paths(
page_size: usize, page_size: usize,
anchor: Option<Cursor>, anchor: Option<Cursor>,
allowed_sources: &[SessionSource], allowed_sources: &[SessionSource],
provider_matcher: Option<&ProviderMatcher<'_>>,
) -> io::Result<ConversationsPage> { ) -> io::Result<ConversationsPage> {
let mut items: Vec<ConversationItem> = Vec::with_capacity(page_size); let mut items: Vec<ConversationItem> = Vec::with_capacity(page_size);
let mut scanned_files = 0usize; let mut scanned_files = 0usize;
@@ -153,6 +166,7 @@ async fn traverse_directories_for_paths(
Some(c) => (c.ts, c.id), Some(c) => (c.ts, c.id),
None => (OffsetDateTime::UNIX_EPOCH, Uuid::nil()), None => (OffsetDateTime::UNIX_EPOCH, Uuid::nil()),
}; };
let mut more_matches_available = false;
let year_dirs = collect_dirs_desc(&root, |s| s.parse::<u16>().ok()).await?; let year_dirs = collect_dirs_desc(&root, |s| s.parse::<u16>().ok()).await?;
@@ -184,6 +198,7 @@ async fn traverse_directories_for_paths(
for (ts, sid, _name_str, path) in day_files.into_iter() { for (ts, sid, _name_str, path) in day_files.into_iter() {
scanned_files += 1; scanned_files += 1;
if scanned_files >= MAX_SCAN_FILES && items.len() >= page_size { if scanned_files >= MAX_SCAN_FILES && items.len() >= page_size {
more_matches_available = true;
break 'outer; break 'outer;
} }
if !anchor_passed { if !anchor_passed {
@@ -194,6 +209,7 @@ async fn traverse_directories_for_paths(
} }
} }
if items.len() == page_size { if items.len() == page_size {
more_matches_available = true;
break 'outer; break 'outer;
} }
// Read head and simultaneously detect message events within the same // Read head and simultaneously detect message events within the same
@@ -208,6 +224,11 @@ async fn traverse_directories_for_paths(
{ {
continue; continue;
} }
if let Some(matcher) = provider_matcher
&& !matcher.matches(summary.model_provider.as_deref())
{
continue;
}
// Apply filters: must have session meta and at least one user message event // Apply filters: must have session meta and at least one user message event
if summary.saw_session_meta && summary.saw_user_event { if summary.saw_session_meta && summary.saw_user_event {
let HeadTailSummary { let HeadTailSummary {
@@ -231,12 +252,21 @@ async fn traverse_directories_for_paths(
} }
} }
let next = build_next_cursor(&items); let reached_scan_cap = scanned_files >= MAX_SCAN_FILES;
if reached_scan_cap && !items.is_empty() {
more_matches_available = true;
}
let next = if more_matches_available {
build_next_cursor(&items)
} else {
None
};
Ok(ConversationsPage { Ok(ConversationsPage {
items, items,
next_cursor: next, next_cursor: next,
num_scanned_files: scanned_files, num_scanned_files: scanned_files,
reached_scan_cap: scanned_files >= MAX_SCAN_FILES, reached_scan_cap,
}) })
} }
@@ -328,6 +358,32 @@ fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uui
Some((ts, uuid)) Some((ts, uuid))
} }
struct ProviderMatcher<'a> {
filters: &'a [String],
matches_default_provider: bool,
}
impl<'a> ProviderMatcher<'a> {
fn new(filters: &'a [String], default_provider: &'a str) -> Option<Self> {
if filters.is_empty() {
return None;
}
let matches_default_provider = filters.iter().any(|provider| provider == default_provider);
Some(Self {
filters,
matches_default_provider,
})
}
fn matches(&self, session_provider: Option<&str>) -> bool {
match session_provider {
Some(provider) => self.filters.iter().any(|candidate| candidate == provider),
None => self.matches_default_provider,
}
}
}
async fn read_head_and_tail( async fn read_head_and_tail(
path: &Path, path: &Path,
head_limit: usize, head_limit: usize,
@@ -354,6 +410,7 @@ async fn read_head_and_tail(
match rollout_line.item { match rollout_line.item {
RolloutItem::SessionMeta(session_meta_line) => { RolloutItem::SessionMeta(session_meta_line) => {
summary.source = Some(session_meta_line.meta.source); summary.source = Some(session_meta_line.meta.source);
summary.model_provider = session_meta_line.meta.model_provider.clone();
summary.created_at = summary summary.created_at = summary
.created_at .created_at
.clone() .clone()

View File

@@ -97,8 +97,18 @@ impl RolloutRecorder {
page_size: usize, page_size: usize,
cursor: Option<&Cursor>, cursor: Option<&Cursor>,
allowed_sources: &[SessionSource], allowed_sources: &[SessionSource],
model_providers: Option<&[String]>,
default_provider: &str,
) -> std::io::Result<ConversationsPage> { ) -> std::io::Result<ConversationsPage> {
get_conversations(codex_home, page_size, cursor, allowed_sources).await get_conversations(
codex_home,
page_size,
cursor,
allowed_sources,
model_providers,
default_provider,
)
.await
} }
/// Attempt to create a new [`RolloutRecorder`]. If the sessions directory /// Attempt to create a new [`RolloutRecorder`]. If the sessions directory
@@ -137,6 +147,7 @@ impl RolloutRecorder {
cli_version: env!("CARGO_PKG_VERSION").to_string(), cli_version: env!("CARGO_PKG_VERSION").to_string(),
instructions, instructions,
source, source,
model_provider: Some(config.model_provider_id.clone()),
}), }),
) )
} }

View File

@@ -32,6 +32,14 @@ use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::UserMessageEvent; use codex_protocol::protocol::UserMessageEvent;
const NO_SOURCE_FILTER: &[SessionSource] = &[]; const NO_SOURCE_FILTER: &[SessionSource] = &[];
const TEST_PROVIDER: &str = "test-provider";
fn provider_vec(providers: &[&str]) -> Vec<String> {
providers
.iter()
.map(std::string::ToString::to_string)
.collect()
}
fn write_session_file( fn write_session_file(
root: &Path, root: &Path,
@@ -39,6 +47,24 @@ fn write_session_file(
uuid: Uuid, uuid: Uuid,
num_records: usize, num_records: usize,
source: Option<SessionSource>, source: Option<SessionSource>,
) -> std::io::Result<(OffsetDateTime, Uuid)> {
write_session_file_with_provider(
root,
ts_str,
uuid,
num_records,
source,
Some("test-provider"),
)
}
fn write_session_file_with_provider(
root: &Path,
ts_str: &str,
uuid: Uuid,
num_records: usize,
source: Option<SessionSource>,
model_provider: Option<&str>,
) -> std::io::Result<(OffsetDateTime, Uuid)> { ) -> std::io::Result<(OffsetDateTime, Uuid)> {
let format: &[FormatItem] = let format: &[FormatItem] =
format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]"); format_description!("[year]-[month]-[day]T[hour]-[minute]-[second]");
@@ -68,6 +94,9 @@ fn write_session_file(
if let Some(source) = source { if let Some(source) = source {
payload["source"] = serde_json::to_value(source).unwrap(); payload["source"] = serde_json::to_value(source).unwrap();
} }
if let Some(provider) = model_provider {
payload["model_provider"] = serde_json::Value::String(provider.to_string());
}
let meta = serde_json::json!({ let meta = serde_json::json!({
"timestamp": ts_str, "timestamp": ts_str,
@@ -134,9 +163,17 @@ async fn test_list_conversations_latest_first() {
) )
.unwrap(); .unwrap();
let page = get_conversations(home, 10, None, INTERACTIVE_SESSION_SOURCES) let provider_filter = provider_vec(&[TEST_PROVIDER]);
.await let page = get_conversations(
.unwrap(); home,
10,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await
.unwrap();
// Build expected objects // Build expected objects
let p1 = home let p1 = home
@@ -166,6 +203,7 @@ async fn test_list_conversations_latest_first() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let head_2 = vec![serde_json::json!({ let head_2 = vec![serde_json::json!({
"id": u2, "id": u2,
@@ -175,6 +213,7 @@ async fn test_list_conversations_latest_first() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let head_1 = vec![serde_json::json!({ let head_1 = vec![serde_json::json!({
"id": u1, "id": u1,
@@ -184,11 +223,9 @@ async fn test_list_conversations_latest_first() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let expected_cursor: Cursor =
serde_json::from_str(&format!("\"2025-01-01T12-00-00|{u1}\"")).unwrap();
let expected = ConversationsPage { let expected = ConversationsPage {
items: vec![ items: vec![
ConversationItem { ConversationItem {
@@ -213,7 +250,7 @@ async fn test_list_conversations_latest_first() {
updated_at: Some("2025-01-01T12-00-00".into()), updated_at: Some("2025-01-01T12-00-00".into()),
}, },
], ],
next_cursor: Some(expected_cursor), next_cursor: None,
num_scanned_files: 3, num_scanned_files: 3,
reached_scan_cap: false, reached_scan_cap: false,
}; };
@@ -275,9 +312,17 @@ async fn test_pagination_cursor() {
) )
.unwrap(); .unwrap();
let page1 = get_conversations(home, 2, None, INTERACTIVE_SESSION_SOURCES) let provider_filter = provider_vec(&[TEST_PROVIDER]);
.await let page1 = get_conversations(
.unwrap(); home,
2,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await
.unwrap();
let p5 = home let p5 = home
.join("sessions") .join("sessions")
.join("2025") .join("2025")
@@ -298,6 +343,7 @@ async fn test_pagination_cursor() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let head_4 = vec![serde_json::json!({ let head_4 = vec![serde_json::json!({
"id": u4, "id": u4,
@@ -307,6 +353,7 @@ async fn test_pagination_cursor() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let expected_cursor1: Cursor = let expected_cursor1: Cursor =
serde_json::from_str(&format!("\"2025-03-04T09-00-00|{u4}\"")).unwrap(); serde_json::from_str(&format!("\"2025-03-04T09-00-00|{u4}\"")).unwrap();
@@ -338,6 +385,8 @@ async fn test_pagination_cursor() {
2, 2,
page1.next_cursor.as_ref(), page1.next_cursor.as_ref(),
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
) )
.await .await
.unwrap(); .unwrap();
@@ -361,6 +410,7 @@ async fn test_pagination_cursor() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let head_2 = vec![serde_json::json!({ let head_2 = vec![serde_json::json!({
"id": u2, "id": u2,
@@ -370,6 +420,7 @@ async fn test_pagination_cursor() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let expected_cursor2: Cursor = let expected_cursor2: Cursor =
serde_json::from_str(&format!("\"2025-03-02T09-00-00|{u2}\"")).unwrap(); serde_json::from_str(&format!("\"2025-03-02T09-00-00|{u2}\"")).unwrap();
@@ -401,6 +452,8 @@ async fn test_pagination_cursor() {
2, 2,
page2.next_cursor.as_ref(), page2.next_cursor.as_ref(),
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
) )
.await .await
.unwrap(); .unwrap();
@@ -418,9 +471,8 @@ async fn test_pagination_cursor() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let expected_cursor3: Cursor =
serde_json::from_str(&format!("\"2025-03-01T09-00-00|{u1}\"")).unwrap();
let expected_page3 = ConversationsPage { let expected_page3 = ConversationsPage {
items: vec![ConversationItem { items: vec![ConversationItem {
path: p1, path: p1,
@@ -429,7 +481,7 @@ async fn test_pagination_cursor() {
created_at: Some("2025-03-01T09-00-00".into()), created_at: Some("2025-03-01T09-00-00".into()),
updated_at: Some("2025-03-01T09-00-00".into()), updated_at: Some("2025-03-01T09-00-00".into()),
}], }],
next_cursor: Some(expected_cursor3), next_cursor: None,
num_scanned_files: 5, // scanned 05, 04 (anchor), 03, 02 (anchor), 01 num_scanned_files: 5, // scanned 05, 04 (anchor), 03, 02 (anchor), 01
reached_scan_cap: false, reached_scan_cap: false,
}; };
@@ -445,9 +497,17 @@ async fn test_get_conversation_contents() {
let ts = "2025-04-01T10-30-00"; let ts = "2025-04-01T10-30-00";
write_session_file(home, ts, uuid, 2, Some(SessionSource::VSCode)).unwrap(); write_session_file(home, ts, uuid, 2, Some(SessionSource::VSCode)).unwrap();
let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES) let provider_filter = provider_vec(&[TEST_PROVIDER]);
.await let page = get_conversations(
.unwrap(); home,
1,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await
.unwrap();
let path = &page.items[0].path; let path = &page.items[0].path;
let content = get_conversation(path).await.unwrap(); let content = get_conversation(path).await.unwrap();
@@ -467,8 +527,8 @@ async fn test_get_conversation_contents() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})]; })];
let expected_cursor: Cursor = serde_json::from_str(&format!("\"{ts}|{uuid}\"")).unwrap();
let expected_page = ConversationsPage { let expected_page = ConversationsPage {
items: vec![ConversationItem { items: vec![ConversationItem {
path: expected_path, path: expected_path,
@@ -477,7 +537,7 @@ async fn test_get_conversation_contents() {
created_at: Some(ts.into()), created_at: Some(ts.into()),
updated_at: Some(ts.into()), updated_at: Some(ts.into()),
}], }],
next_cursor: Some(expected_cursor), next_cursor: None,
num_scanned_files: 1, num_scanned_files: 1,
reached_scan_cap: false, reached_scan_cap: false,
}; };
@@ -495,6 +555,7 @@ async fn test_get_conversation_contents() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
} }
}); });
let user_event = serde_json::json!({ let user_event = serde_json::json!({
@@ -532,6 +593,7 @@ async fn test_tail_includes_last_response_items() -> Result<()> {
originator: "test_originator".into(), originator: "test_originator".into(),
cli_version: "test_version".into(), cli_version: "test_version".into(),
source: SessionSource::VSCode, source: SessionSource::VSCode,
model_provider: Some("test-provider".into()),
}, },
git: None, git: None,
}), }),
@@ -563,7 +625,16 @@ async fn test_tail_includes_last_response_items() -> Result<()> {
} }
drop(file); drop(file);
let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; let provider_filter = provider_vec(&[TEST_PROVIDER]);
let page = get_conversations(
home,
1,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await?;
let item = page.items.first().expect("conversation item"); let item = page.items.first().expect("conversation item");
let tail_len = item.tail.len(); let tail_len = item.tail.len();
assert_eq!(tail_len, 10usize.min(total_messages)); assert_eq!(tail_len, 10usize.min(total_messages));
@@ -615,6 +686,7 @@ async fn test_tail_handles_short_sessions() -> Result<()> {
originator: "test_originator".into(), originator: "test_originator".into(),
cli_version: "test_version".into(), cli_version: "test_version".into(),
source: SessionSource::VSCode, source: SessionSource::VSCode,
model_provider: Some("test-provider".into()),
}, },
git: None, git: None,
}), }),
@@ -645,7 +717,16 @@ async fn test_tail_handles_short_sessions() -> Result<()> {
} }
drop(file); drop(file);
let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; let provider_filter = provider_vec(&[TEST_PROVIDER]);
let page = get_conversations(
home,
1,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await?;
let tail = &page.items.first().expect("conversation item").tail; let tail = &page.items.first().expect("conversation item").tail;
assert_eq!(tail.len(), 3); assert_eq!(tail.len(), 3);
@@ -699,6 +780,7 @@ async fn test_tail_skips_trailing_non_responses() -> Result<()> {
originator: "test_originator".into(), originator: "test_originator".into(),
cli_version: "test_version".into(), cli_version: "test_version".into(),
source: SessionSource::VSCode, source: SessionSource::VSCode,
model_provider: Some("test-provider".into()),
}, },
git: None, git: None,
}), }),
@@ -743,7 +825,16 @@ async fn test_tail_skips_trailing_non_responses() -> Result<()> {
writeln!(file, "{}", serde_json::to_string(&shutdown_event)?)?; writeln!(file, "{}", serde_json::to_string(&shutdown_event)?)?;
drop(file); drop(file);
let page = get_conversations(home, 1, None, INTERACTIVE_SESSION_SOURCES).await?; let provider_filter = provider_vec(&[TEST_PROVIDER]);
let page = get_conversations(
home,
1,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await?;
let tail = &page.items.first().expect("conversation item").tail; let tail = &page.items.first().expect("conversation item").tail;
let expected: Vec<serde_json::Value> = (0..4) let expected: Vec<serde_json::Value> = (0..4)
@@ -785,9 +876,17 @@ async fn test_stable_ordering_same_second_pagination() {
write_session_file(home, ts, u2, 0, Some(SessionSource::VSCode)).unwrap(); write_session_file(home, ts, u2, 0, Some(SessionSource::VSCode)).unwrap();
write_session_file(home, ts, u3, 0, Some(SessionSource::VSCode)).unwrap(); write_session_file(home, ts, u3, 0, Some(SessionSource::VSCode)).unwrap();
let page1 = get_conversations(home, 2, None, INTERACTIVE_SESSION_SOURCES) let provider_filter = provider_vec(&[TEST_PROVIDER]);
.await let page1 = get_conversations(
.unwrap(); home,
2,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await
.unwrap();
let p3 = home let p3 = home
.join("sessions") .join("sessions")
@@ -810,6 +909,7 @@ async fn test_stable_ordering_same_second_pagination() {
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version", "cli_version": "test_version",
"source": "vscode", "source": "vscode",
"model_provider": "test-provider",
})] })]
}; };
let expected_cursor1: Cursor = serde_json::from_str(&format!("\"{ts}|{u2}\"")).unwrap(); let expected_cursor1: Cursor = serde_json::from_str(&format!("\"{ts}|{u2}\"")).unwrap();
@@ -841,6 +941,8 @@ async fn test_stable_ordering_same_second_pagination() {
2, 2,
page1.next_cursor.as_ref(), page1.next_cursor.as_ref(),
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
) )
.await .await
.unwrap(); .unwrap();
@@ -850,7 +952,6 @@ async fn test_stable_ordering_same_second_pagination() {
.join("07") .join("07")
.join("01") .join("01")
.join(format!("rollout-2025-07-01T00-00-00-{u1}.jsonl")); .join(format!("rollout-2025-07-01T00-00-00-{u1}.jsonl"));
let expected_cursor2: Cursor = serde_json::from_str(&format!("\"{ts}|{u1}\"")).unwrap();
let expected_page2 = ConversationsPage { let expected_page2 = ConversationsPage {
items: vec![ConversationItem { items: vec![ConversationItem {
path: p1, path: p1,
@@ -859,7 +960,7 @@ async fn test_stable_ordering_same_second_pagination() {
created_at: Some(ts.to_string()), created_at: Some(ts.to_string()),
updated_at: Some(ts.to_string()), updated_at: Some(ts.to_string()),
}], }],
next_cursor: Some(expected_cursor2), next_cursor: None,
num_scanned_files: 3, // scanned u3, u2 (anchor), u1 num_scanned_files: 3, // scanned u3, u2 (anchor), u1
reached_scan_cap: false, reached_scan_cap: false,
}; };
@@ -891,9 +992,17 @@ async fn test_source_filter_excludes_non_matching_sessions() {
) )
.unwrap(); .unwrap();
let interactive_only = get_conversations(home, 10, None, INTERACTIVE_SESSION_SOURCES) let provider_filter = provider_vec(&[TEST_PROVIDER]);
.await let interactive_only = get_conversations(
.unwrap(); home,
10,
None,
INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
TEST_PROVIDER,
)
.await
.unwrap();
let paths: Vec<_> = interactive_only let paths: Vec<_> = interactive_only
.items .items
.iter() .iter()
@@ -905,7 +1014,7 @@ async fn test_source_filter_excludes_non_matching_sessions() {
path.ends_with("rollout-2025-08-02T10-00-00-00000000-0000-0000-0000-00000000002a.jsonl") path.ends_with("rollout-2025-08-02T10-00-00-00000000-0000-0000-0000-00000000002a.jsonl")
})); }));
let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER) let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER, None, TEST_PROVIDER)
.await .await
.unwrap(); .unwrap();
let all_paths: Vec<_> = all_sessions let all_paths: Vec<_> = all_sessions
@@ -921,3 +1030,102 @@ async fn test_source_filter_excludes_non_matching_sessions() {
path.ends_with("rollout-2025-08-01T10-00-00-00000000-0000-0000-0000-00000000004d.jsonl") path.ends_with("rollout-2025-08-01T10-00-00-00000000-0000-0000-0000-00000000004d.jsonl")
})); }));
} }
#[tokio::test]
async fn test_model_provider_filter_selects_only_matching_sessions() -> Result<()> {
let temp = TempDir::new().unwrap();
let home = temp.path();
let openai_id = Uuid::from_u128(1);
let beta_id = Uuid::from_u128(2);
let none_id = Uuid::from_u128(3);
write_session_file_with_provider(
home,
"2025-09-01T12-00-00",
openai_id,
1,
Some(SessionSource::VSCode),
Some("openai"),
)?;
write_session_file_with_provider(
home,
"2025-09-01T11-00-00",
beta_id,
1,
Some(SessionSource::VSCode),
Some("beta"),
)?;
write_session_file_with_provider(
home,
"2025-09-01T10-00-00",
none_id,
1,
Some(SessionSource::VSCode),
None,
)?;
let openai_id_str = openai_id.to_string();
let none_id_str = none_id.to_string();
let openai_filter = provider_vec(&["openai"]);
let openai_sessions = get_conversations(
home,
10,
None,
NO_SOURCE_FILTER,
Some(openai_filter.as_slice()),
"openai",
)
.await?;
assert_eq!(openai_sessions.items.len(), 2);
let openai_ids: Vec<_> = openai_sessions
.items
.iter()
.filter_map(|item| {
item.head
.first()
.and_then(|value| value.get("id"))
.and_then(serde_json::Value::as_str)
.map(str::to_string)
})
.collect();
assert!(openai_ids.contains(&openai_id_str));
assert!(openai_ids.contains(&none_id_str));
let beta_filter = provider_vec(&["beta"]);
let beta_sessions = get_conversations(
home,
10,
None,
NO_SOURCE_FILTER,
Some(beta_filter.as_slice()),
"openai",
)
.await?;
assert_eq!(beta_sessions.items.len(), 1);
let beta_id_str = beta_id.to_string();
let beta_head = beta_sessions
.items
.first()
.and_then(|item| item.head.first())
.and_then(|value| value.get("id"))
.and_then(serde_json::Value::as_str);
assert_eq!(beta_head, Some(beta_id_str.as_str()));
let unknown_filter = provider_vec(&["unknown"]);
let unknown_sessions = get_conversations(
home,
10,
None,
NO_SOURCE_FILTER,
Some(unknown_filter.as_slice()),
"openai",
)
.await?;
assert!(unknown_sessions.items.is_empty());
let all_sessions = get_conversations(home, 10, None, NO_SOURCE_FILTER, None, "openai").await?;
assert_eq!(all_sessions.items.len(), 3);
Ok(())
}

View File

@@ -75,9 +75,17 @@ async fn chat_mode_stream_cli() {
server.verify().await; server.verify().await;
// Verify a new session rollout was created and is discoverable via list_conversations // Verify a new session rollout was created and is discoverable via list_conversations
let page = RolloutRecorder::list_conversations(home.path(), 10, None, &[]) let provider_filter = vec!["mock".to_string()];
.await let page = RolloutRecorder::list_conversations(
.expect("list conversations"); home.path(),
10,
None,
&[],
Some(provider_filter.as_slice()),
"mock",
)
.await
.expect("list conversations");
assert!( assert!(
!page.items.is_empty(), !page.items.is_empty(),
"expected at least one session to be listed" "expected at least one session to be listed"

View File

@@ -154,7 +154,8 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
"instructions": "be nice", "instructions": "be nice",
"cwd": ".", "cwd": ".",
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version" "cli_version": "test_version",
"model_provider": "test-provider"
} }
}) })
) )

View File

@@ -368,7 +368,8 @@ async fn review_input_isolated_from_parent_history() {
"instructions": null, "instructions": null,
"cwd": ".", "cwd": ".",
"originator": "test_originator", "originator": "test_originator",
"cli_version": "test_version" "cli_version": "test_version",
"model_provider": "test-provider"
} }
}); });
f.write_all(format!("{meta_line}\n").as_bytes()) f.write_all(format!("{meta_line}\n").as_bytes())

View File

@@ -28,7 +28,8 @@ fn write_minimal_rollout_with_id(codex_home: &Path, id: Uuid) -> PathBuf {
"instructions": null, "instructions": null,
"cwd": ".", "cwd": ".",
"originator": "test", "originator": "test",
"cli_version": "test" "cli_version": "test",
"model_provider": "test-provider"
} }
}) })
) )

View File

@@ -389,8 +389,16 @@ async fn resolve_resume_path(
args: &crate::cli::ResumeArgs, args: &crate::cli::ResumeArgs,
) -> anyhow::Result<Option<PathBuf>> { ) -> anyhow::Result<Option<PathBuf>> {
if args.last { if args.last {
match codex_core::RolloutRecorder::list_conversations(&config.codex_home, 1, None, &[]) let default_provider_filter = vec![config.model_provider_id.clone()];
.await match codex_core::RolloutRecorder::list_conversations(
&config.codex_home,
1,
None,
&[],
Some(default_provider_filter.as_slice()),
&config.model_provider_id,
)
.await
{ {
Ok(page) => Ok(page.items.first().map(|it| it.path.clone())), Ok(page) => Ok(page.items.first().map(|it| it.path.clone())),
Err(e) => { Err(e) => {

View File

@@ -935,6 +935,7 @@ pub struct SessionMeta {
pub instructions: Option<String>, pub instructions: Option<String>,
#[serde(default)] #[serde(default)]
pub source: SessionSource, pub source: SessionSource,
pub model_provider: Option<String>,
} }
impl Default for SessionMeta { impl Default for SessionMeta {
@@ -947,6 +948,7 @@ impl Default for SessionMeta {
cli_version: String::new(), cli_version: String::new(),
instructions: None, instructions: None,
source: SessionSource::default(), source: SessionSource::default(),
model_provider: None,
} }
} }
} }

View File

@@ -391,11 +391,14 @@ async fn run_ratatui_app(
} }
} }
} else if cli.resume_last { } else if cli.resume_last {
let provider_filter = vec![config.model_provider_id.clone()];
match RolloutRecorder::list_conversations( match RolloutRecorder::list_conversations(
&config.codex_home, &config.codex_home,
1, 1,
None, None,
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
&config.model_provider_id,
) )
.await .await
{ {
@@ -407,7 +410,13 @@ async fn run_ratatui_app(
Err(_) => resume_picker::ResumeSelection::StartFresh, Err(_) => resume_picker::ResumeSelection::StartFresh,
} }
} else if cli.resume_picker { } else if cli.resume_picker {
match resume_picker::run_resume_picker(&mut tui, &config.codex_home).await? { match resume_picker::run_resume_picker(
&mut tui,
&config.codex_home,
&config.model_provider_id,
)
.await?
{
resume_picker::ResumeSelection::Exit => { resume_picker::ResumeSelection::Exit => {
restore(); restore();
session_log::log_session_end(); session_log::log_session_end();

View File

@@ -49,6 +49,7 @@ struct PageLoadRequest {
cursor: Option<Cursor>, cursor: Option<Cursor>,
request_token: usize, request_token: usize,
search_token: Option<usize>, search_token: Option<usize>,
default_provider: String,
} }
type PageLoader = Arc<dyn Fn(PageLoadRequest) + Send + Sync>; type PageLoader = Arc<dyn Fn(PageLoadRequest) + Send + Sync>;
@@ -64,19 +65,28 @@ enum BackgroundEvent {
/// Interactive session picker that lists recorded rollout files with simple /// Interactive session picker that lists recorded rollout files with simple
/// search and pagination. Shows the first user input as the preview, relative /// search and pagination. Shows the first user input as the preview, relative
/// time (e.g., "5 seconds ago"), and the absolute path. /// time (e.g., "5 seconds ago"), and the absolute path.
pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result<ResumeSelection> { pub async fn run_resume_picker(
tui: &mut Tui,
codex_home: &Path,
default_provider: &str,
) -> Result<ResumeSelection> {
let alt = AltScreenGuard::enter(tui); let alt = AltScreenGuard::enter(tui);
let (bg_tx, bg_rx) = mpsc::unbounded_channel(); let (bg_tx, bg_rx) = mpsc::unbounded_channel();
let default_provider = default_provider.to_string();
let loader_tx = bg_tx.clone(); let loader_tx = bg_tx.clone();
let page_loader: PageLoader = Arc::new(move |request: PageLoadRequest| { let page_loader: PageLoader = Arc::new(move |request: PageLoadRequest| {
let tx = loader_tx.clone(); let tx = loader_tx.clone();
tokio::spawn(async move { tokio::spawn(async move {
let provider_filter = vec![request.default_provider.clone()];
let page = RolloutRecorder::list_conversations( let page = RolloutRecorder::list_conversations(
&request.codex_home, &request.codex_home,
PAGE_SIZE, PAGE_SIZE,
request.cursor.as_ref(), request.cursor.as_ref(),
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
request.default_provider.as_str(),
) )
.await; .await;
let _ = tx.send(BackgroundEvent::PageLoaded { let _ = tx.send(BackgroundEvent::PageLoaded {
@@ -91,6 +101,7 @@ pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result<Resum
codex_home.to_path_buf(), codex_home.to_path_buf(),
alt.tui.frame_requester(), alt.tui.frame_requester(),
page_loader, page_loader,
default_provider.clone(),
); );
state.load_initial_page().await?; state.load_initial_page().await?;
state.request_frame(); state.request_frame();
@@ -165,6 +176,7 @@ struct PickerState {
next_search_token: usize, next_search_token: usize,
page_loader: PageLoader, page_loader: PageLoader,
view_rows: Option<usize>, view_rows: Option<usize>,
default_provider: String,
} }
struct PaginationState { struct PaginationState {
@@ -225,7 +237,12 @@ struct Row {
} }
impl PickerState { impl PickerState {
fn new(codex_home: PathBuf, requester: FrameRequester, page_loader: PageLoader) -> Self { fn new(
codex_home: PathBuf,
requester: FrameRequester,
page_loader: PageLoader,
default_provider: String,
) -> Self {
Self { Self {
codex_home, codex_home,
requester, requester,
@@ -246,6 +263,7 @@ impl PickerState {
next_search_token: 0, next_search_token: 0,
page_loader, page_loader,
view_rows: None, view_rows: None,
default_provider,
} }
} }
@@ -324,11 +342,14 @@ impl PickerState {
} }
async fn load_initial_page(&mut self) -> Result<()> { async fn load_initial_page(&mut self) -> Result<()> {
let provider_filter = vec![self.default_provider.clone()];
let page = RolloutRecorder::list_conversations( let page = RolloutRecorder::list_conversations(
&self.codex_home, &self.codex_home,
PAGE_SIZE, PAGE_SIZE,
None, None,
INTERACTIVE_SESSION_SOURCES, INTERACTIVE_SESSION_SOURCES,
Some(provider_filter.as_slice()),
self.default_provider.as_str(),
) )
.await?; .await?;
self.reset_pagination(); self.reset_pagination();
@@ -552,6 +573,7 @@ impl PickerState {
cursor: Some(cursor), cursor: Some(cursor),
request_token, request_token,
search_token, search_token,
default_provider: self.default_provider.clone(),
}); });
} }
@@ -1061,8 +1083,12 @@ mod tests {
use ratatui::layout::Layout; use ratatui::layout::Layout;
let loader: PageLoader = Arc::new(|_| {}); let loader: PageLoader = Arc::new(|_| {});
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
let now = Utc::now(); let now = Utc::now();
let rows = vec![ let rows = vec![
@@ -1117,8 +1143,12 @@ mod tests {
#[test] #[test]
fn pageless_scrolling_deduplicates_and_keeps_order() { fn pageless_scrolling_deduplicates_and_keeps_order() {
let loader: PageLoader = Arc::new(|_| {}); let loader: PageLoader = Arc::new(|_| {});
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
state.reset_pagination(); state.reset_pagination();
state.ingest_page(page( state.ingest_page(page(
@@ -1179,8 +1209,12 @@ mod tests {
request_sink.lock().unwrap().push(req); request_sink.lock().unwrap().push(req);
}); });
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
state.reset_pagination(); state.reset_pagination();
state.ingest_page(page( state.ingest_page(page(
vec![ vec![
@@ -1204,8 +1238,12 @@ mod tests {
#[test] #[test]
fn page_navigation_uses_view_rows() { fn page_navigation_uses_view_rows() {
let loader: PageLoader = Arc::new(|_| {}); let loader: PageLoader = Arc::new(|_| {});
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
let mut items = Vec::new(); let mut items = Vec::new();
for idx in 0..20 { for idx in 0..20 {
@@ -1248,8 +1286,12 @@ mod tests {
#[test] #[test]
fn up_at_bottom_does_not_scroll_when_visible() { fn up_at_bottom_does_not_scroll_when_visible() {
let loader: PageLoader = Arc::new(|_| {}); let loader: PageLoader = Arc::new(|_| {});
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
let mut items = Vec::new(); let mut items = Vec::new();
for idx in 0..10 { for idx in 0..10 {
@@ -1288,8 +1330,12 @@ mod tests {
request_sink.lock().unwrap().push(req); request_sink.lock().unwrap().push(req);
}); });
let mut state = let mut state = PickerState::new(
PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader); PathBuf::from("/tmp"),
FrameRequester::test_dummy(),
loader,
String::from("openai"),
);
state.reset_pagination(); state.reset_pagination();
state.ingest_page(page( state.ingest_page(page(
vec![make_item( vec![make_item(