Add Updated at time in resume picker (#4468)

<img width="639" height="281" alt="image"
src="https://github.com/user-attachments/assets/92b2ad2b-9e18-4485-9b8d-d7056eb98651"
/>
This commit is contained in:
Ahmed Ibrahim
2025-10-01 10:40:43 -07:00
committed by GitHub
parent 699c121606
commit d78d0764aa
4 changed files with 420 additions and 79 deletions

View File

@@ -40,10 +40,24 @@ pub struct ConversationItem {
pub head: Vec<serde_json::Value>,
/// Last up to `TAIL_RECORD_LIMIT` JSONL response records parsed as JSON.
pub tail: Vec<serde_json::Value>,
/// RFC3339 timestamp string for when the session was created, if available.
pub created_at: Option<String>,
/// RFC3339 timestamp string for the most recent response in the tail, if available.
pub updated_at: Option<String>,
}
/// Accumulated result of a single bounded scan over a rollout JSONL file:
/// head/tail record samples, filter flags, and the timestamps used to
/// populate `ConversationItem::created_at` / `updated_at`.
#[derive(Default)]
struct HeadTailSummary {
    // First up to `HEAD_RECORD_LIMIT` JSONL records parsed as JSON.
    head: Vec<serde_json::Value>,
    // Last up to `TAIL_RECORD_LIMIT` response records parsed as JSON.
    tail: Vec<serde_json::Value>,
    // Set once a `SessionMeta` record is seen while reading the head.
    saw_session_meta: bool,
    // Set once a `UserMessage` event is seen while reading the head.
    saw_user_event: bool,
    // Timestamp of the earliest head record, if any (RFC3339 string).
    created_at: Option<String>,
    // Timestamp of the newest response record found in the tail, if any.
    updated_at: Option<String>,
}
/// Hard cap to bound worst-case work per request.
const MAX_SCAN_FILES: usize = 100;
const MAX_SCAN_FILES: usize = 10000;
const HEAD_RECORD_LIMIT: usize = 10;
const TAIL_RECORD_LIMIT: usize = 10;
@@ -179,13 +193,26 @@ async fn traverse_directories_for_paths(
}
// Read head and simultaneously detect message events within the same
// first N JSONL records to avoid a second file read.
let (head, tail, saw_session_meta, saw_user_event) =
read_head_and_tail(&path, HEAD_RECORD_LIMIT, TAIL_RECORD_LIMIT)
.await
.unwrap_or((Vec::new(), Vec::new(), false, false));
let summary = read_head_and_tail(&path, HEAD_RECORD_LIMIT, TAIL_RECORD_LIMIT)
.await
.unwrap_or_default();
// Apply filters: must have session meta and at least one user message event
if saw_session_meta && saw_user_event {
items.push(ConversationItem { path, head, tail });
if summary.saw_session_meta && summary.saw_user_event {
let HeadTailSummary {
head,
tail,
created_at,
mut updated_at,
..
} = summary;
updated_at = updated_at.or_else(|| created_at.clone());
items.push(ConversationItem {
path,
head,
tail,
created_at,
updated_at,
});
}
}
}
@@ -293,17 +320,15 @@ async fn read_head_and_tail(
path: &Path,
head_limit: usize,
tail_limit: usize,
) -> io::Result<(Vec<serde_json::Value>, Vec<serde_json::Value>, bool, bool)> {
) -> io::Result<HeadTailSummary> {
use tokio::io::AsyncBufReadExt;
let file = tokio::fs::File::open(path).await?;
let reader = tokio::io::BufReader::new(file);
let mut lines = reader.lines();
let mut head: Vec<serde_json::Value> = Vec::new();
let mut saw_session_meta = false;
let mut saw_user_event = false;
let mut summary = HeadTailSummary::default();
while head.len() < head_limit {
while summary.head.len() < head_limit {
let line_opt = lines.next_line().await?;
let Some(line) = line_opt else { break };
let trimmed = line.trim();
@@ -316,14 +341,22 @@ async fn read_head_and_tail(
match rollout_line.item {
RolloutItem::SessionMeta(session_meta_line) => {
summary.created_at = summary
.created_at
.clone()
.or_else(|| Some(rollout_line.timestamp.clone()));
if let Ok(val) = serde_json::to_value(session_meta_line) {
head.push(val);
saw_session_meta = true;
summary.head.push(val);
summary.saw_session_meta = true;
}
}
RolloutItem::ResponseItem(item) => {
summary.created_at = summary
.created_at
.clone()
.or_else(|| Some(rollout_line.timestamp.clone()));
if let Ok(val) = serde_json::to_value(item) {
head.push(val);
summary.head.push(val);
}
}
RolloutItem::TurnContext(_) => {
@@ -334,28 +367,30 @@ async fn read_head_and_tail(
}
RolloutItem::EventMsg(ev) => {
if matches!(ev, EventMsg::UserMessage(_)) {
saw_user_event = true;
summary.saw_user_event = true;
}
}
}
}
let tail = if tail_limit == 0 {
Vec::new()
} else {
read_tail_records(path, tail_limit).await?
};
Ok((head, tail, saw_session_meta, saw_user_event))
if tail_limit != 0 {
let (tail, updated_at) = read_tail_records(path, tail_limit).await?;
summary.tail = tail;
summary.updated_at = updated_at;
}
Ok(summary)
}
async fn read_tail_records(path: &Path, max_records: usize) -> io::Result<Vec<serde_json::Value>> {
async fn read_tail_records(
path: &Path,
max_records: usize,
) -> io::Result<(Vec<serde_json::Value>, Option<String>)> {
use std::io::SeekFrom;
use tokio::io::AsyncReadExt;
use tokio::io::AsyncSeekExt;
if max_records == 0 {
return Ok(Vec::new());
return Ok((Vec::new(), None));
}
const CHUNK_SIZE: usize = 8192;
@@ -363,24 +398,28 @@ async fn read_tail_records(path: &Path, max_records: usize) -> io::Result<Vec<se
let mut file = tokio::fs::File::open(path).await?;
let mut pos = file.seek(SeekFrom::End(0)).await?;
if pos == 0 {
return Ok(Vec::new());
return Ok((Vec::new(), None));
}
let mut buffer: Vec<u8> = Vec::new();
let mut latest_timestamp: Option<String> = None;
loop {
let slice_start = match (pos > 0, buffer.iter().position(|&b| b == b'\n')) {
(true, Some(idx)) => idx + 1,
_ => 0,
};
let tail = collect_last_response_values(&buffer[slice_start..], max_records);
let (tail, newest_ts) = collect_last_response_values(&buffer[slice_start..], max_records);
if latest_timestamp.is_none() {
latest_timestamp = newest_ts.clone();
}
if tail.len() >= max_records || pos == 0 {
return Ok(tail);
return Ok((tail, latest_timestamp.or(newest_ts)));
}
let read_size = CHUNK_SIZE.min(pos as usize);
if read_size == 0 {
return Ok(tail);
return Ok((tail, latest_timestamp.or(newest_ts)));
}
pos -= read_size as u64;
file.seek(SeekFrom::Start(pos)).await?;
@@ -391,15 +430,19 @@ async fn read_tail_records(path: &Path, max_records: usize) -> io::Result<Vec<se
}
}
fn collect_last_response_values(buffer: &[u8], max_records: usize) -> Vec<serde_json::Value> {
fn collect_last_response_values(
buffer: &[u8],
max_records: usize,
) -> (Vec<serde_json::Value>, Option<String>) {
use std::borrow::Cow;
if buffer.is_empty() || max_records == 0 {
return Vec::new();
return (Vec::new(), None);
}
let text: Cow<'_, str> = String::from_utf8_lossy(buffer);
let mut collected_rev: Vec<serde_json::Value> = Vec::new();
let mut latest_timestamp: Option<String> = None;
for line in text.lines().rev() {
let trimmed = line.trim();
if trimmed.is_empty() {
@@ -407,9 +450,13 @@ fn collect_last_response_values(buffer: &[u8], max_records: usize) -> Vec<serde_
}
let parsed: serde_json::Result<RolloutLine> = serde_json::from_str(trimmed);
let Ok(rollout_line) = parsed else { continue };
if let RolloutItem::ResponseItem(item) = rollout_line.item
&& let Ok(val) = serde_json::to_value(item)
let RolloutLine { timestamp, item } = rollout_line;
if let RolloutItem::ResponseItem(item) = item
&& let Ok(val) = serde_json::to_value(&item)
{
if latest_timestamp.is_none() {
latest_timestamp = Some(timestamp.clone());
}
collected_rev.push(val);
if collected_rev.len() == max_records {
break;
@@ -417,7 +464,7 @@ fn collect_last_response_values(buffer: &[u8], max_records: usize) -> Vec<serde_
}
}
collected_rev.reverse();
collected_rev
(collected_rev, latest_timestamp)
}
/// Locate a recorded conversation rollout file by its UUID string using the existing

View File

@@ -159,16 +159,22 @@ async fn test_list_conversations_latest_first() {
path: p1,
head: head_3,
tail: Vec::new(),
created_at: Some("2025-01-03T12-00-00".into()),
updated_at: Some("2025-01-03T12-00-00".into()),
},
ConversationItem {
path: p2,
head: head_2,
tail: Vec::new(),
created_at: Some("2025-01-02T12-00-00".into()),
updated_at: Some("2025-01-02T12-00-00".into()),
},
ConversationItem {
path: p3,
head: head_1,
tail: Vec::new(),
created_at: Some("2025-01-01T12-00-00".into()),
updated_at: Some("2025-01-01T12-00-00".into()),
},
],
next_cursor: Some(expected_cursor),
@@ -235,11 +241,15 @@ async fn test_pagination_cursor() {
path: p5,
head: head_5,
tail: Vec::new(),
created_at: Some("2025-03-05T09-00-00".into()),
updated_at: Some("2025-03-05T09-00-00".into()),
},
ConversationItem {
path: p4,
head: head_4,
tail: Vec::new(),
created_at: Some("2025-03-04T09-00-00".into()),
updated_at: Some("2025-03-04T09-00-00".into()),
},
],
next_cursor: Some(expected_cursor1.clone()),
@@ -287,11 +297,15 @@ async fn test_pagination_cursor() {
path: p3,
head: head_3,
tail: Vec::new(),
created_at: Some("2025-03-03T09-00-00".into()),
updated_at: Some("2025-03-03T09-00-00".into()),
},
ConversationItem {
path: p2,
head: head_2,
tail: Vec::new(),
created_at: Some("2025-03-02T09-00-00".into()),
updated_at: Some("2025-03-02T09-00-00".into()),
},
],
next_cursor: Some(expected_cursor2.clone()),
@@ -324,6 +338,8 @@ async fn test_pagination_cursor() {
path: p1,
head: head_1,
tail: Vec::new(),
created_at: Some("2025-03-01T09-00-00".into()),
updated_at: Some("2025-03-01T09-00-00".into()),
}],
next_cursor: Some(expected_cursor3),
num_scanned_files: 5, // scanned 05, 04 (anchor), 03, 02 (anchor), 01
@@ -367,6 +383,8 @@ async fn test_get_conversation_contents() {
path: expected_path,
head: expected_head,
tail: Vec::new(),
created_at: Some(ts.into()),
updated_at: Some(ts.into()),
}],
next_cursor: Some(expected_cursor),
num_scanned_files: 1,
@@ -449,18 +467,23 @@ async fn test_tail_includes_last_response_items() -> Result<()> {
let expected: Vec<serde_json::Value> = (total_messages - tail_len..total_messages)
.map(|idx| {
serde_json::to_value(ResponseItem::Message {
id: None,
role: "assistant".into(),
content: vec![ContentItem::OutputText {
text: format!("reply-{idx}"),
}],
serde_json::json!({
"type": "message",
"role": "assistant",
"content": [
{
"type": "output_text",
"text": format!("reply-{idx}"),
}
],
})
.expect("serialize response item")
})
.collect();
assert_eq!(item.tail, expected);
assert_eq!(item.created_at.as_deref(), Some(ts));
let expected_updated = format!("{ts}-{last:02}", last = total_messages - 1);
assert_eq!(item.updated_at.as_deref(), Some(expected_updated.as_str()));
Ok(())
}
@@ -526,18 +549,25 @@ async fn test_tail_handles_short_sessions() -> Result<()> {
let expected: Vec<serde_json::Value> = (0..3)
.map(|idx| {
serde_json::to_value(ResponseItem::Message {
id: None,
role: "assistant".into(),
content: vec![ContentItem::OutputText {
text: format!("short-{idx}"),
}],
serde_json::json!({
"type": "message",
"role": "assistant",
"content": [
{
"type": "output_text",
"text": format!("short-{idx}"),
}
],
})
.expect("serialize response item")
})
.collect();
assert_eq!(tail, &expected);
let expected_updated = format!("{ts}-{last:02}", last = 2);
assert_eq!(
page.items[0].updated_at.as_deref(),
Some(expected_updated.as_str())
);
Ok(())
}
@@ -615,18 +645,25 @@ async fn test_tail_skips_trailing_non_responses() -> Result<()> {
let expected: Vec<serde_json::Value> = (0..4)
.map(|idx| {
serde_json::to_value(ResponseItem::Message {
id: None,
role: "assistant".into(),
content: vec![ContentItem::OutputText {
text: format!("response-{idx}"),
}],
serde_json::json!({
"type": "message",
"role": "assistant",
"content": [
{
"type": "output_text",
"text": format!("response-{idx}"),
}
],
})
.expect("serialize response item")
})
.collect();
assert_eq!(tail, &expected);
let expected_updated = format!("{ts}-{last:02}", last = 3);
assert_eq!(
page.items[0].updated_at.as_deref(),
Some(expected_updated.as_str())
);
Ok(())
}
@@ -676,11 +713,15 @@ async fn test_stable_ordering_same_second_pagination() {
path: p3,
head: head(u3),
tail: Vec::new(),
created_at: Some(ts.to_string()),
updated_at: Some(ts.to_string()),
},
ConversationItem {
path: p2,
head: head(u2),
tail: Vec::new(),
created_at: Some(ts.to_string()),
updated_at: Some(ts.to_string()),
},
],
next_cursor: Some(expected_cursor1.clone()),
@@ -704,6 +745,8 @@ async fn test_stable_ordering_same_second_pagination() {
path: p1,
head: head(u1),
tail: Vec::new(),
created_at: Some(ts.to_string()),
updated_at: Some(ts.to_string()),
}],
next_cursor: Some(expected_cursor2),
num_scanned_files: 3, // scanned u3, u2 (anchor), u1

View File

@@ -22,6 +22,7 @@ use ratatui::text::Span;
use tokio::sync::mpsc;
use tokio_stream::StreamExt;
use tokio_stream::wrappers::UnboundedReceiverStream;
use unicode_width::UnicodeWidthStr;
use crate::text_formatting::truncate_text;
use crate::tui::FrameRequester;
@@ -110,7 +111,7 @@ pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result<Resum
}
TuiEvent::Draw => {
if let Ok(size) = alt.tui.terminal.size() {
let list_height = size.height.saturating_sub(3) as usize;
let list_height = size.height.saturating_sub(4) as usize;
state.update_view_rows(list_height);
state.ensure_minimum_rows_for_view(list_height);
}
@@ -218,7 +219,8 @@ impl SearchState {
struct Row {
path: PathBuf,
preview: String,
ts: Option<DateTime<Utc>>,
created_at: Option<DateTime<Utc>>,
updated_at: Option<DateTime<Utc>>,
}
impl PickerState {
@@ -564,13 +566,16 @@ fn rows_from_items(items: Vec<ConversationItem>) -> Vec<Row> {
}
fn head_to_row(item: &ConversationItem) -> Row {
let mut ts: Option<DateTime<Utc>> = None;
if let Some(first) = item.head.first()
&& let Some(t) = first.get("timestamp").and_then(|v| v.as_str())
&& let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(t)
{
ts = Some(parsed.with_timezone(&Utc));
}
let created_at = item
.created_at
.as_deref()
.and_then(parse_timestamp_str)
.or_else(|| item.head.first().and_then(extract_timestamp));
let updated_at = item
.updated_at
.as_deref()
.and_then(parse_timestamp_str)
.or(created_at);
let preview = preview_from_head(&item.head)
.map(|s| s.trim().to_string())
@@ -580,10 +585,25 @@ fn head_to_row(item: &ConversationItem) -> Row {
Row {
path: item.path.clone(),
preview,
ts,
created_at,
updated_at,
}
}
/// Parse an RFC3339 timestamp string into a UTC `DateTime`.
///
/// Returns `None` when the string is not valid RFC3339.
fn parse_timestamp_str(ts: &str) -> Option<DateTime<Utc>> {
    match chrono::DateTime::parse_from_rfc3339(ts) {
        Ok(parsed) => Some(parsed.with_timezone(&Utc)),
        Err(_) => None,
    }
}
fn extract_timestamp(value: &serde_json::Value) -> Option<DateTime<Utc>> {
value
.get("timestamp")
.and_then(|v| v.as_str())
.and_then(|t| chrono::DateTime::parse_from_rfc3339(t).ok())
.map(|dt| dt.with_timezone(&Utc))
}
fn preview_from_head(head: &[serde_json::Value]) -> Option<String> {
head.iter()
.filter_map(|value| serde_json::from_value::<ResponseItem>(value.clone()).ok())
@@ -627,10 +647,11 @@ fn draw_picker(tui: &mut Tui, state: &PickerState) -> std::io::Result<()> {
let height = tui.terminal.size()?.height;
tui.draw(height, |frame| {
let area = frame.area();
let [header, search, list, hint] = Layout::vertical([
let [header, search, columns, list, hint] = Layout::vertical([
Constraint::Length(1),
Constraint::Length(1),
Constraint::Min(area.height.saturating_sub(3)),
Constraint::Length(1),
Constraint::Min(area.height.saturating_sub(4)),
Constraint::Length(1),
])
.areas(area);
@@ -649,8 +670,11 @@ fn draw_picker(tui: &mut Tui, state: &PickerState) -> std::io::Result<()> {
};
frame.render_widget_ref(Line::from(q), search);
// List
render_list(frame, list, state);
let metrics = calculate_column_metrics(&state.filtered_rows);
// Column headers and list
render_column_headers(frame, columns, &metrics);
render_list(frame, list, state, &metrics);
// Hint line
let hint_line: Line = vec![
@@ -671,7 +695,12 @@ fn draw_picker(tui: &mut Tui, state: &PickerState) -> std::io::Result<()> {
})
}
fn render_list(frame: &mut crate::custom_terminal::Frame, area: Rect, state: &PickerState) {
fn render_list(
frame: &mut crate::custom_terminal::Frame,
area: Rect,
state: &PickerState,
metrics: &ColumnMetrics,
) {
if area.height == 0 {
return;
}
@@ -686,20 +715,58 @@ fn render_list(frame: &mut crate::custom_terminal::Frame, area: Rect, state: &Pi
let capacity = area.height as usize;
let start = state.scroll_top.min(rows.len().saturating_sub(1));
let end = rows.len().min(start + capacity);
let labels = &metrics.labels;
let mut y = area.y;
for (idx, row) in rows[start..end].iter().enumerate() {
let max_created_width = metrics.max_created_width;
let max_updated_width = metrics.max_updated_width;
for (idx, (row, (created_label, updated_label))) in rows[start..end]
.iter()
.zip(labels[start..end].iter())
.enumerate()
{
let is_sel = start + idx == state.selected;
let marker = if is_sel { "> ".bold() } else { " ".into() };
let ts = row
.ts
.map(human_time_ago)
.unwrap_or_else(|| "".to_string())
.dim();
let max_cols = area.width.saturating_sub(6) as usize;
let preview = truncate_text(&row.preview, max_cols);
let marker_width = 2usize;
let created_span = if max_created_width == 0 {
None
} else {
Some(Span::from(format!("{created_label:<max_created_width$}")).dim())
};
let updated_span = if max_updated_width == 0 {
None
} else {
Some(Span::from(format!("{updated_label:<max_updated_width$}")).dim())
};
let mut preview_width = area.width as usize;
preview_width = preview_width.saturating_sub(marker_width);
if max_created_width > 0 {
preview_width = preview_width.saturating_sub(max_created_width + 2);
}
if max_updated_width > 0 {
preview_width = preview_width.saturating_sub(max_updated_width + 2);
}
let add_leading_gap = max_created_width == 0 && max_updated_width == 0;
if add_leading_gap {
preview_width = preview_width.saturating_sub(2);
}
let preview = truncate_text(&row.preview, preview_width);
let mut spans: Vec<Span> = vec![marker];
if let Some(created) = created_span {
spans.push(created);
spans.push(" ".into());
}
if let Some(updated) = updated_span {
spans.push(updated);
spans.push(" ".into());
}
if add_leading_gap {
spans.push(" ".into());
}
spans.push(preview.into());
let line: Line = vec![marker, ts, " ".into(), preview.into()].into();
let line: Line = spans.into();
let rect = Rect::new(area.x, y, area.width, 1);
frame.render_widget_ref(line, rect);
y = y.saturating_add(1);
@@ -775,14 +842,89 @@ fn human_time_ago(ts: DateTime<Utc>) -> String {
}
}
fn format_created_label(row: &Row) -> String {
row.created_at
.map(human_time_ago)
.unwrap_or_else(|| "-".to_string())
}
fn format_updated_label(row: &Row) -> String {
match (row.updated_at, row.created_at) {
(Some(updated), _) => human_time_ago(updated),
(None, Some(created)) => human_time_ago(created),
(None, None) => "-".to_string(),
}
}
/// Draw the bold "Created" / "Updated" / "Conversation" header line above
/// the resume-picker list, padding each timestamp header to its column's
/// measured width. A column whose width is zero is omitted entirely.
fn render_column_headers(
    frame: &mut crate::custom_terminal::Frame,
    area: Rect,
    metrics: &ColumnMetrics,
) {
    // The layout may have given us no vertical space; draw nothing then.
    if area.height == 0 {
        return;
    }
    // Leading spacer keeps the headers aligned with the list rows, which
    // start with a selection marker.
    let mut spans: Vec<Span> = vec![" ".into()];
    // Both timestamp columns follow the same pad-then-gap pattern.
    for (title, width) in [
        ("Created", metrics.max_created_width),
        ("Updated", metrics.max_updated_width),
    ] {
        if width > 0 {
            spans.push(Span::from(format!("{title:<width$}")).bold());
            spans.push(" ".into());
        }
    }
    spans.push("Conversation".bold());
    frame.render_widget_ref(Line::from(spans), area);
}
/// Column layout data shared between the header row and the list rows.
struct ColumnMetrics {
    // Widest "created" label in display columns, never narrower than the
    // "Created" header word itself.
    max_created_width: usize,
    // Widest "updated" label in display columns, never narrower than the
    // "Updated" header word itself.
    max_updated_width: usize,
    // Pre-formatted (created, updated) label pair per row, in row order.
    labels: Vec<(String, String)>,
}
/// Format the timestamp labels for every row and measure the widest entry
/// in each column, so the header and the rows can share one layout.
///
/// Widths start at the header words ("Created" / "Updated") so a column is
/// never narrower than its title.
fn calculate_column_metrics(rows: &[Row]) -> ColumnMetrics {
    let labels: Vec<(String, String)> = rows
        .iter()
        .map(|row| (format_created_label(row), format_updated_label(row)))
        .collect();
    let mut max_created_width = UnicodeWidthStr::width("Created");
    let mut max_updated_width = UnicodeWidthStr::width("Updated");
    for (created, updated) in &labels {
        // Measure display width (not byte length) so wide glyphs pad right.
        max_created_width = max_created_width.max(UnicodeWidthStr::width(created.as_str()));
        max_updated_width = max_updated_width.max(UnicodeWidthStr::width(updated.as_str()));
    }
    ColumnMetrics {
        max_created_width,
        max_updated_width,
        labels,
    }
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::Duration;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
use insta::assert_snapshot;
use serde_json::json;
use std::future::Future;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
@@ -805,6 +947,8 @@ mod tests {
path: PathBuf::from(path),
head: head_with_ts_and_user_text(ts, &[preview]),
tail: Vec::new(),
created_at: Some(ts.to_string()),
updated_at: Some(ts.to_string()),
}
}
@@ -865,11 +1009,15 @@ mod tests {
path: PathBuf::from("/tmp/a.jsonl"),
head: head_with_ts_and_user_text("2025-01-01T00:00:00Z", &["A"]),
tail: Vec::new(),
created_at: Some("2025-01-01T00:00:00Z".into()),
updated_at: Some("2025-01-01T00:00:00Z".into()),
};
let b = ConversationItem {
path: PathBuf::from("/tmp/b.jsonl"),
head: head_with_ts_and_user_text("2025-01-02T00:00:00Z", &["B"]),
tail: Vec::new(),
created_at: Some("2025-01-02T00:00:00Z".into()),
updated_at: Some("2025-01-02T00:00:00Z".into()),
};
let rows = rows_from_items(vec![a, b]);
assert_eq!(rows.len(), 2);
@@ -878,6 +1026,101 @@ mod tests {
assert!(rows[1].preview.contains('B'));
}
#[test]
// A ConversationItem whose tail carries a later timestamp than its head
// should produce a Row with created_at from the item's created_at field
// and updated_at from the (later) updated_at field.
fn row_uses_tail_timestamp_for_updated_at() {
    let head = head_with_ts_and_user_text("2025-01-01T00:00:00Z", &["Hello"]);
    // Tail response recorded one hour after the session was created.
    let tail = vec![json!({
        "timestamp": "2025-01-01T01:00:00Z",
        "type": "message",
        "role": "assistant",
        "content": [
            {
                "type": "output_text",
                "text": "hi",
            }
        ],
    })];
    let item = ConversationItem {
        path: PathBuf::from("/tmp/a.jsonl"),
        head,
        tail,
        created_at: Some("2025-01-01T00:00:00Z".into()),
        updated_at: Some("2025-01-01T01:00:00Z".into()),
    };
    let row = head_to_row(&item);
    let expected_created = chrono::DateTime::parse_from_rfc3339("2025-01-01T00:00:00Z")
        .unwrap()
        .with_timezone(&Utc);
    let expected_updated = chrono::DateTime::parse_from_rfc3339("2025-01-01T01:00:00Z")
        .unwrap()
        .with_timezone(&Utc);
    assert_eq!(row.created_at, Some(expected_created));
    assert_eq!(row.updated_at, Some(expected_updated));
}
#[test]
// Render the header row plus three picker rows into a VT100 test backend
// and snapshot the resulting text table (Created / Updated / Conversation
// columns).
fn resume_table_snapshot() {
    use crate::custom_terminal::Terminal;
    use crate::test_backend::VT100Backend;
    use ratatui::layout::Constraint;
    use ratatui::layout::Layout;

    // No-op loader: this test drives state directly, no paging needed.
    let loader: PageLoader = Arc::new(|_| {});
    let mut state =
        PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);
    // Offsets relative to `now` so human_time_ago yields stable labels
    // ("16 minutes ago", "42 seconds ago", ...).
    let now = Utc::now();
    let rows = vec![
        Row {
            path: PathBuf::from("/tmp/a.jsonl"),
            preview: String::from("Fix resume picker timestamps"),
            created_at: Some(now - Duration::minutes(16)),
            updated_at: Some(now - Duration::seconds(42)),
        },
        Row {
            path: PathBuf::from("/tmp/b.jsonl"),
            preview: String::from("Investigate lazy pagination cap"),
            created_at: Some(now - Duration::hours(1)),
            updated_at: Some(now - Duration::minutes(35)),
        },
        Row {
            path: PathBuf::from("/tmp/c.jsonl"),
            preview: String::from("Explain the codebase"),
            created_at: Some(now - Duration::hours(2)),
            updated_at: Some(now - Duration::hours(2)),
        },
    ];
    state.all_rows = rows.clone();
    state.filtered_rows = rows;
    state.view_rows = Some(3);
    // Second row selected so the snapshot shows the "> " marker.
    state.selected = 1;
    state.scroll_top = 0;
    state.update_view_rows(3);
    let metrics = calculate_column_metrics(&state.filtered_rows);
    let width: u16 = 80;
    let height: u16 = 6;
    let backend = VT100Backend::new(width, height);
    let mut terminal = Terminal::with_options(backend).expect("terminal");
    terminal.set_viewport_area(Rect::new(0, 0, width, height));
    {
        let mut frame = terminal.get_frame();
        let area = frame.area();
        // One line for the headers, the rest for the list.
        let segments =
            Layout::vertical([Constraint::Length(1), Constraint::Min(1)]).split(area);
        render_column_headers(&mut frame, segments[0], &metrics);
        render_list(&mut frame, segments[1], &state, &metrics);
    }
    terminal.flush().expect("flush");
    let snapshot = terminal.backend().to_string();
    assert_snapshot!("resume_picker_table", snapshot);
}
#[test]
fn pageless_scrolling_deduplicates_and_keeps_order() {
let loader: PageLoader = Arc::new(|_| {});

View File

@@ -0,0 +1,8 @@
---
source: tui/src/resume_picker.rs
expression: snapshot
---
Created Updated Conversation
16 minutes ago 42 seconds ago Fix resume picker timestamps
> 1 hour ago 35 minutes ago Investigate lazy pagination cap
2 hours ago 2 hours ago Explain the codebase