pageless session list (#3194)
This commit is contained in:
@@ -1,5 +1,7 @@
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use chrono::DateTime;
|
||||
use chrono::Utc;
|
||||
@@ -16,7 +18,10 @@ use ratatui::layout::Layout;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::style::Stylize as _;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::text::Span;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio_stream::StreamExt;
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
|
||||
use crate::text_formatting::truncate_text;
|
||||
use crate::tui::FrameRequester;
|
||||
@@ -28,6 +33,7 @@ use codex_protocol::protocol::InputMessageKind;
|
||||
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
|
||||
|
||||
/// Number of conversations fetched from the backend per page request.
const PAGE_SIZE: usize = 25;
/// Start prefetching the next page once the selection is within this many rows
/// of the end of the loaded list.
const LOAD_NEAR_THRESHOLD: usize = 5;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ResumeSelection {
|
||||
@@ -36,17 +42,63 @@ pub enum ResumeSelection {
|
||||
Exit,
|
||||
}
|
||||
|
||||
/// Parameters for one background request to load a page of recorded
/// conversations.
#[derive(Clone)]
struct PageLoadRequest {
    // Root directory holding the rollout files to list.
    codex_home: PathBuf,
    // Pagination cursor to continue from; `None` starts from the newest entry.
    cursor: Option<Cursor>,
    // Token identifying this request so stale responses can be dropped.
    request_token: usize,
    // Present when an active search triggered the load; carried back in the
    // response so the search can decide whether to keep paging.
    search_token: Option<usize>,
}
|
||||
|
||||
/// Callback that performs an asynchronous page load for a [`PageLoadRequest`].
type PageLoader = Arc<dyn Fn(PageLoadRequest) + Send + Sync>;
|
||||
|
||||
/// Events delivered from background tasks back into the picker's event loop.
enum BackgroundEvent {
    /// A page load finished, successfully or with an I/O error.
    PageLoaded {
        // Token of the request this page answers; mismatched tokens are ignored.
        request_token: usize,
        // Search token carried through from the originating request, if any.
        search_token: Option<usize>,
        // The loaded page, or the error produced while listing conversations.
        page: std::io::Result<ConversationsPage>,
    },
}
|
||||
|
||||
/// Interactive session picker that lists recorded rollout files with simple
|
||||
/// search and pagination. Shows the first user input as the preview, relative
|
||||
/// time (e.g., "5 seconds ago"), and the absolute path.
|
||||
pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result<ResumeSelection> {
|
||||
let alt = AltScreenGuard::enter(tui);
|
||||
let mut state = PickerState::new(codex_home.to_path_buf(), alt.tui.frame_requester());
|
||||
state.load_page(None).await?;
|
||||
let (bg_tx, bg_rx) = mpsc::unbounded_channel();
|
||||
|
||||
let loader_tx = bg_tx.clone();
|
||||
let page_loader: PageLoader = Arc::new(move |request: PageLoadRequest| {
|
||||
let tx = loader_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let page = RolloutRecorder::list_conversations(
|
||||
&request.codex_home,
|
||||
PAGE_SIZE,
|
||||
request.cursor.as_ref(),
|
||||
)
|
||||
.await;
|
||||
let _ = tx.send(BackgroundEvent::PageLoaded {
|
||||
request_token: request.request_token,
|
||||
search_token: request.search_token,
|
||||
page,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
let mut state = PickerState::new(
|
||||
codex_home.to_path_buf(),
|
||||
alt.tui.frame_requester(),
|
||||
page_loader,
|
||||
);
|
||||
state.load_initial_page().await?;
|
||||
state.request_frame();
|
||||
|
||||
let mut events = alt.tui.event_stream();
|
||||
while let Some(ev) = events.next().await {
|
||||
let mut tui_events = alt.tui.event_stream().fuse();
|
||||
let mut background_events = UnboundedReceiverStream::new(bg_rx).fuse();
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
Some(ev) = tui_events.next() => {
|
||||
match ev {
|
||||
TuiEvent::Key(key) => {
|
||||
if matches!(key.kind, KeyEventKind::Release) {
|
||||
@@ -57,12 +109,22 @@ pub async fn run_resume_picker(tui: &mut Tui, codex_home: &Path) -> Result<Resum
|
||||
}
|
||||
}
|
||||
TuiEvent::Draw => {
|
||||
if let Ok(size) = alt.tui.terminal.size() {
|
||||
let list_height = size.height.saturating_sub(3) as usize;
|
||||
state.update_view_rows(list_height);
|
||||
state.ensure_minimum_rows_for_view(list_height);
|
||||
}
|
||||
draw_picker(alt.tui, &state)?;
|
||||
}
|
||||
// Ignore paste and attach-image in picker
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some(event) = background_events.next() => {
|
||||
state.handle_background_event(event)?;
|
||||
}
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback – treat as cancel/new
|
||||
Ok(ResumeSelection::StartFresh)
|
||||
@@ -89,22 +151,67 @@ impl Drop for AltScreenGuard<'_> {
|
||||
struct PickerState {
|
||||
codex_home: PathBuf,
|
||||
requester: FrameRequester,
|
||||
// pagination
|
||||
pagination: Pagination,
|
||||
// data
|
||||
all_rows: Vec<Row>, // unfiltered rows for current page
|
||||
pagination: PaginationState,
|
||||
all_rows: Vec<Row>,
|
||||
filtered_rows: Vec<Row>,
|
||||
seen_paths: HashSet<PathBuf>,
|
||||
selected: usize,
|
||||
// search
|
||||
scroll_top: usize,
|
||||
query: String,
|
||||
search_state: SearchState,
|
||||
next_request_token: usize,
|
||||
next_search_token: usize,
|
||||
page_loader: PageLoader,
|
||||
view_rows: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct Pagination {
|
||||
current_anchor: Option<Cursor>,
|
||||
backstack: Vec<Option<Cursor>>, // track previous anchors for ←/a
|
||||
struct PaginationState {
|
||||
next_cursor: Option<Cursor>,
|
||||
page_index: usize,
|
||||
num_scanned_files: usize,
|
||||
reached_scan_cap: bool,
|
||||
loading: LoadingState,
|
||||
}
|
||||
|
||||
/// Whether a background page load is currently outstanding.
#[derive(Clone, Copy, Debug)]
enum LoadingState {
    /// No load in flight.
    Idle,
    /// A load is in flight; carries the tokens of the pending request.
    Pending(PendingLoad),
}
|
||||
|
||||
/// Tokens identifying the page load that is currently in flight.
#[derive(Clone, Copy, Debug)]
struct PendingLoad {
    // The response must carry this token to be accepted.
    request_token: usize,
    // Search token of the search that triggered this load, if any.
    search_token: Option<usize>,
}
|
||||
|
||||
/// Whether an empty-result search is actively paging through older sessions.
#[derive(Clone, Copy, Debug)]
enum SearchState {
    /// No search-driven loading in progress.
    Idle,
    /// A search is paging until a match appears or the scan is exhausted;
    /// `token` distinguishes it from superseded searches.
    Active { token: usize },
}
|
||||
|
||||
/// Why a page load was requested: plain scrolling, or an active search that
/// keeps paging until it finds a match.
enum LoadTrigger {
    Scroll,
    Search { token: usize },
}
|
||||
|
||||
impl LoadingState {
|
||||
fn is_pending(&self) -> bool {
|
||||
matches!(self, LoadingState::Pending(_))
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn active_token(&self) -> Option<usize> {
|
||||
match self {
|
||||
SearchState::Idle => None,
|
||||
SearchState::Active { token } => Some(*token),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_active(&self) -> bool {
|
||||
self.active_token().is_some()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -115,20 +222,27 @@ struct Row {
|
||||
}
|
||||
|
||||
impl PickerState {
|
||||
fn new(codex_home: PathBuf, requester: FrameRequester) -> Self {
|
||||
fn new(codex_home: PathBuf, requester: FrameRequester, page_loader: PageLoader) -> Self {
|
||||
Self {
|
||||
codex_home,
|
||||
requester,
|
||||
pagination: Pagination {
|
||||
current_anchor: None,
|
||||
backstack: vec![None],
|
||||
pagination: PaginationState {
|
||||
next_cursor: None,
|
||||
page_index: 0,
|
||||
num_scanned_files: 0,
|
||||
reached_scan_cap: false,
|
||||
loading: LoadingState::Idle,
|
||||
},
|
||||
all_rows: Vec::new(),
|
||||
filtered_rows: Vec::new(),
|
||||
seen_paths: HashSet::new(),
|
||||
selected: 0,
|
||||
scroll_top: 0,
|
||||
query: String::new(),
|
||||
search_state: SearchState::Idle,
|
||||
next_request_token: 0,
|
||||
next_search_token: 0,
|
||||
page_loader,
|
||||
view_rows: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -154,24 +268,40 @@ impl PickerState {
|
||||
KeyCode::Up => {
|
||||
if self.selected > 0 {
|
||||
self.selected -= 1;
|
||||
self.ensure_selected_visible();
|
||||
}
|
||||
self.request_frame();
|
||||
}
|
||||
KeyCode::Down => {
|
||||
if self.selected + 1 < self.filtered_rows.len() {
|
||||
self.selected += 1;
|
||||
self.ensure_selected_visible();
|
||||
}
|
||||
self.maybe_load_more_for_scroll();
|
||||
self.request_frame();
|
||||
}
|
||||
KeyCode::Left | KeyCode::Char('a') => {
|
||||
self.prev_page().await?;
|
||||
KeyCode::PageUp => {
|
||||
let step = self.view_rows.unwrap_or(10).max(1);
|
||||
if self.selected > 0 {
|
||||
self.selected = self.selected.saturating_sub(step);
|
||||
self.ensure_selected_visible();
|
||||
self.request_frame();
|
||||
}
|
||||
}
|
||||
KeyCode::PageDown => {
|
||||
if !self.filtered_rows.is_empty() {
|
||||
let step = self.view_rows.unwrap_or(10).max(1);
|
||||
let max_index = self.filtered_rows.len().saturating_sub(1);
|
||||
self.selected = (self.selected + step).min(max_index);
|
||||
self.ensure_selected_visible();
|
||||
self.maybe_load_more_for_scroll();
|
||||
self.request_frame();
|
||||
}
|
||||
KeyCode::Right | KeyCode::Char('d') => {
|
||||
self.next_page().await?;
|
||||
}
|
||||
KeyCode::Backspace => {
|
||||
self.query.pop();
|
||||
self.apply_filter();
|
||||
let mut new_query = self.query.clone();
|
||||
new_query.pop();
|
||||
self.set_query(new_query);
|
||||
}
|
||||
KeyCode::Char(c) => {
|
||||
// basic text input for search
|
||||
@@ -180,8 +310,9 @@ impl PickerState {
|
||||
.contains(crossterm::event::KeyModifiers::CONTROL)
|
||||
&& !key.modifiers.contains(crossterm::event::KeyModifiers::ALT)
|
||||
{
|
||||
self.query.push(c);
|
||||
self.apply_filter();
|
||||
let mut new_query = self.query.clone();
|
||||
new_query.push(c);
|
||||
self.set_query(new_query);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -189,49 +320,71 @@ impl PickerState {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
async fn prev_page(&mut self) -> Result<()> {
|
||||
if self.pagination.page_index == 0 {
|
||||
    /// Loads the first page of conversations (awaited inline, before the event
    /// loop starts) and resets all picker state so the list shows it from the
    /// top.
    async fn load_initial_page(&mut self) -> Result<()> {
        let page = RolloutRecorder::list_conversations(&self.codex_home, PAGE_SIZE, None).await?;
        // Clear pagination bookkeeping and all previously accumulated rows
        // before ingesting, so the fresh page fully defines the visible state.
        self.reset_pagination();
        self.all_rows.clear();
        self.filtered_rows.clear();
        self.seen_paths.clear();
        self.search_state = SearchState::Idle;
        self.selected = 0;
        self.ingest_page(page);
        Ok(())
    }
|
||||
|
||||
fn handle_background_event(&mut self, event: BackgroundEvent) -> Result<()> {
|
||||
match event {
|
||||
BackgroundEvent::PageLoaded {
|
||||
request_token,
|
||||
search_token,
|
||||
page,
|
||||
} => {
|
||||
let pending = match self.pagination.loading {
|
||||
LoadingState::Pending(pending) => pending,
|
||||
LoadingState::Idle => return Ok(()),
|
||||
};
|
||||
if pending.request_token != request_token {
|
||||
return Ok(());
|
||||
}
|
||||
// current_anchor points to the page we just loaded; backstack[page_index-1] is the anchor to reload
|
||||
if self.pagination.page_index > 0 {
|
||||
self.pagination.page_index -= 1;
|
||||
let anchor = self
|
||||
self.pagination.loading = LoadingState::Idle;
|
||||
let page = page.map_err(color_eyre::Report::from)?;
|
||||
self.ingest_page(page);
|
||||
let completed_token = pending.search_token.or(search_token);
|
||||
self.continue_search_if_token_matches(completed_token);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn reset_pagination(&mut self) {
|
||||
self.pagination.next_cursor = None;
|
||||
self.pagination.num_scanned_files = 0;
|
||||
self.pagination.reached_scan_cap = false;
|
||||
self.pagination.loading = LoadingState::Idle;
|
||||
}
|
||||
|
||||
fn ingest_page(&mut self, page: ConversationsPage) {
|
||||
if let Some(cursor) = page.next_cursor.clone() {
|
||||
self.pagination.next_cursor = Some(cursor);
|
||||
} else {
|
||||
self.pagination.next_cursor = None;
|
||||
}
|
||||
self.pagination.num_scanned_files = self
|
||||
.pagination
|
||||
.backstack
|
||||
.get(self.pagination.page_index)
|
||||
.cloned()
|
||||
.flatten();
|
||||
self.pagination.current_anchor = anchor.clone();
|
||||
self.load_page(anchor.as_ref()).await?;
|
||||
}
|
||||
Ok(())
|
||||
.num_scanned_files
|
||||
.saturating_add(page.num_scanned_files);
|
||||
if page.reached_scan_cap {
|
||||
self.pagination.reached_scan_cap = true;
|
||||
}
|
||||
|
||||
async fn next_page(&mut self) -> Result<()> {
|
||||
if let Some(next) = self.pagination.next_cursor.clone() {
|
||||
// Record the anchor for the page we are moving to at index new_index
|
||||
let new_index = self.pagination.page_index + 1;
|
||||
if self.pagination.backstack.len() <= new_index {
|
||||
self.pagination.backstack.resize(new_index + 1, None);
|
||||
let rows = rows_from_items(page.items);
|
||||
for row in rows {
|
||||
if self.seen_paths.insert(row.path.clone()) {
|
||||
self.all_rows.push(row);
|
||||
}
|
||||
self.pagination.backstack[new_index] = Some(next.clone());
|
||||
self.pagination.current_anchor = Some(next.clone());
|
||||
self.pagination.page_index = new_index;
|
||||
let anchor = self.pagination.current_anchor.clone();
|
||||
self.load_page(anchor.as_ref()).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_page(&mut self, anchor: Option<&Cursor>) -> Result<()> {
|
||||
let page = RolloutRecorder::list_conversations(&self.codex_home, PAGE_SIZE, anchor).await?;
|
||||
self.pagination.next_cursor = page.next_cursor.clone();
|
||||
self.all_rows = to_rows(page);
|
||||
self.apply_filter();
|
||||
// reset selection on new page
|
||||
self.selected = 0;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn apply_filter(&mut self) {
|
||||
@@ -249,12 +402,165 @@ impl PickerState {
|
||||
if self.selected >= self.filtered_rows.len() {
|
||||
self.selected = self.filtered_rows.len().saturating_sub(1);
|
||||
}
|
||||
if self.filtered_rows.is_empty() {
|
||||
self.scroll_top = 0;
|
||||
}
|
||||
self.ensure_selected_visible();
|
||||
self.request_frame();
|
||||
}
|
||||
|
||||
fn set_query(&mut self, new_query: String) {
|
||||
if self.query == new_query {
|
||||
return;
|
||||
}
|
||||
self.query = new_query;
|
||||
self.selected = 0;
|
||||
self.apply_filter();
|
||||
if self.query.is_empty() {
|
||||
self.search_state = SearchState::Idle;
|
||||
return;
|
||||
}
|
||||
if !self.filtered_rows.is_empty() {
|
||||
self.search_state = SearchState::Idle;
|
||||
return;
|
||||
}
|
||||
if self.pagination.reached_scan_cap || self.pagination.next_cursor.is_none() {
|
||||
self.search_state = SearchState::Idle;
|
||||
return;
|
||||
}
|
||||
let token = self.allocate_search_token();
|
||||
self.search_state = SearchState::Active { token };
|
||||
self.load_more_if_needed(LoadTrigger::Search { token });
|
||||
}
|
||||
|
||||
fn continue_search_if_needed(&mut self) {
|
||||
let Some(token) = self.search_state.active_token() else {
|
||||
return;
|
||||
};
|
||||
if !self.filtered_rows.is_empty() {
|
||||
self.search_state = SearchState::Idle;
|
||||
return;
|
||||
}
|
||||
if self.pagination.reached_scan_cap || self.pagination.next_cursor.is_none() {
|
||||
self.search_state = SearchState::Idle;
|
||||
return;
|
||||
}
|
||||
self.load_more_if_needed(LoadTrigger::Search { token });
|
||||
}
|
||||
|
||||
fn continue_search_if_token_matches(&mut self, completed_token: Option<usize>) {
|
||||
let Some(active) = self.search_state.active_token() else {
|
||||
return;
|
||||
};
|
||||
if let Some(token) = completed_token
|
||||
&& token != active
|
||||
{
|
||||
return;
|
||||
}
|
||||
self.continue_search_if_needed();
|
||||
}
|
||||
|
||||
fn ensure_selected_visible(&mut self) {
|
||||
if self.filtered_rows.is_empty() {
|
||||
self.scroll_top = 0;
|
||||
return;
|
||||
}
|
||||
let capacity = self.view_rows.unwrap_or(self.filtered_rows.len()).max(1);
|
||||
|
||||
if self.selected < self.scroll_top {
|
||||
self.scroll_top = self.selected;
|
||||
} else {
|
||||
let last_visible = self.scroll_top.saturating_add(capacity - 1);
|
||||
if self.selected > last_visible {
|
||||
self.scroll_top = self.selected.saturating_sub(capacity - 1);
|
||||
}
|
||||
}
|
||||
|
||||
let max_start = self.filtered_rows.len().saturating_sub(capacity);
|
||||
if self.scroll_top > max_start {
|
||||
self.scroll_top = max_start;
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_minimum_rows_for_view(&mut self, minimum_rows: usize) {
|
||||
if minimum_rows == 0 {
|
||||
return;
|
||||
}
|
||||
if self.filtered_rows.len() >= minimum_rows {
|
||||
return;
|
||||
}
|
||||
if self.pagination.loading.is_pending() || self.pagination.next_cursor.is_none() {
|
||||
return;
|
||||
}
|
||||
if let Some(token) = self.search_state.active_token() {
|
||||
self.load_more_if_needed(LoadTrigger::Search { token });
|
||||
} else {
|
||||
self.load_more_if_needed(LoadTrigger::Scroll);
|
||||
}
|
||||
}
|
||||
|
||||
fn update_view_rows(&mut self, rows: usize) {
|
||||
self.view_rows = if rows == 0 { None } else { Some(rows) };
|
||||
self.ensure_selected_visible();
|
||||
}
|
||||
|
||||
fn maybe_load_more_for_scroll(&mut self) {
|
||||
if self.pagination.loading.is_pending() {
|
||||
return;
|
||||
}
|
||||
if self.pagination.next_cursor.is_none() {
|
||||
return;
|
||||
}
|
||||
if self.filtered_rows.is_empty() {
|
||||
return;
|
||||
}
|
||||
let remaining = self.filtered_rows.len().saturating_sub(self.selected + 1);
|
||||
if remaining <= LOAD_NEAR_THRESHOLD {
|
||||
self.load_more_if_needed(LoadTrigger::Scroll);
|
||||
}
|
||||
}
|
||||
|
||||
fn load_more_if_needed(&mut self, trigger: LoadTrigger) {
|
||||
if self.pagination.loading.is_pending() {
|
||||
return;
|
||||
}
|
||||
let Some(cursor) = self.pagination.next_cursor.clone() else {
|
||||
return;
|
||||
};
|
||||
let request_token = self.allocate_request_token();
|
||||
let search_token = match trigger {
|
||||
LoadTrigger::Scroll => None,
|
||||
LoadTrigger::Search { token } => Some(token),
|
||||
};
|
||||
self.pagination.loading = LoadingState::Pending(PendingLoad {
|
||||
request_token,
|
||||
search_token,
|
||||
});
|
||||
self.request_frame();
|
||||
|
||||
(self.page_loader)(PageLoadRequest {
|
||||
codex_home: self.codex_home.clone(),
|
||||
cursor: Some(cursor),
|
||||
request_token,
|
||||
search_token,
|
||||
});
|
||||
}
|
||||
|
||||
fn allocate_request_token(&mut self) -> usize {
|
||||
let token = self.next_request_token;
|
||||
self.next_request_token = self.next_request_token.wrapping_add(1);
|
||||
token
|
||||
}
|
||||
|
||||
fn allocate_search_token(&mut self) -> usize {
|
||||
let token = self.next_search_token;
|
||||
self.next_search_token = self.next_search_token.wrapping_add(1);
|
||||
token
|
||||
}
|
||||
}
|
||||
|
||||
fn to_rows(page: ConversationsPage) -> Vec<Row> {
|
||||
page.items.into_iter().map(|it| head_to_row(&it)).collect()
|
||||
fn rows_from_items(items: Vec<ConversationItem>) -> Vec<Row> {
|
||||
items.into_iter().map(|item| head_to_row(&item)).collect()
|
||||
}
|
||||
|
||||
fn head_to_row(item: &ConversationItem) -> Row {
|
||||
@@ -350,14 +656,15 @@ fn draw_picker(tui: &mut Tui, state: &PickerState) -> std::io::Result<()> {
|
||||
let hint_line: Line = vec![
|
||||
"Enter".bold(),
|
||||
" to resume ".into(),
|
||||
"• ".dim(),
|
||||
"Esc".bold(),
|
||||
" to start new ".into(),
|
||||
"• ".dim(),
|
||||
"Ctrl+C".into(),
|
||||
" to quit ".dim(),
|
||||
"←/a".into(),
|
||||
" prev ".dim(),
|
||||
"→/d".into(),
|
||||
" next".dim(),
|
||||
" to quit ".into(),
|
||||
"• ".dim(),
|
||||
"↑/↓".into(),
|
||||
" to browse".dim(),
|
||||
]
|
||||
.into();
|
||||
frame.render_widget_ref(hint_line, hint);
|
||||
@@ -365,19 +672,23 @@ fn draw_picker(tui: &mut Tui, state: &PickerState) -> std::io::Result<()> {
|
||||
}
|
||||
|
||||
fn render_list(frame: &mut crate::custom_terminal::Frame, area: Rect, state: &PickerState) {
|
||||
let rows = &state.filtered_rows;
|
||||
if rows.is_empty() {
|
||||
frame.render_widget_ref(Line::from("No sessions found".italic().dim()), area);
|
||||
if area.height == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// Compute how many rows fit (1 line per item)
|
||||
let capacity = area.height as usize;
|
||||
let start = state.selected.saturating_sub(capacity.saturating_sub(1));
|
||||
let visible = &rows[start..rows.len().min(start + capacity)];
|
||||
let rows = &state.filtered_rows;
|
||||
if rows.is_empty() {
|
||||
let message = render_empty_state_line(state);
|
||||
frame.render_widget_ref(message, area);
|
||||
return;
|
||||
}
|
||||
|
||||
let capacity = area.height as usize;
|
||||
let start = state.scroll_top.min(rows.len().saturating_sub(1));
|
||||
let end = rows.len().min(start + capacity);
|
||||
let mut y = area.y;
|
||||
for (idx, row) in visible.iter().enumerate() {
|
||||
|
||||
for (idx, row) in rows[start..end].iter().enumerate() {
|
||||
let is_sel = start + idx == state.selected;
|
||||
let marker = if is_sel { "> ".bold() } else { " ".into() };
|
||||
let ts = row
|
||||
@@ -393,6 +704,40 @@ fn render_list(frame: &mut crate::custom_terminal::Frame, area: Rect, state: &Pi
|
||||
frame.render_widget_ref(line, rect);
|
||||
y = y.saturating_add(1);
|
||||
}
|
||||
|
||||
if state.pagination.loading.is_pending() && y < area.y.saturating_add(area.height) {
|
||||
let loading_line: Line = vec![" ".into(), "Loading older sessions…".italic().dim()].into();
|
||||
let rect = Rect::new(area.x, y, area.width, 1);
|
||||
frame.render_widget_ref(loading_line, rect);
|
||||
}
|
||||
}
|
||||
|
||||
fn render_empty_state_line(state: &PickerState) -> Line<'static> {
|
||||
if !state.query.is_empty() {
|
||||
if state.search_state.is_active()
|
||||
|| (state.pagination.loading.is_pending() && state.pagination.next_cursor.is_some())
|
||||
{
|
||||
return vec!["Searching…".italic().dim()].into();
|
||||
}
|
||||
if state.pagination.reached_scan_cap {
|
||||
let msg = format!(
|
||||
"Search scanned first {} sessions; more may exist",
|
||||
state.pagination.num_scanned_files
|
||||
);
|
||||
return vec![Span::from(msg).italic().dim()].into();
|
||||
}
|
||||
return vec!["No results for your search".italic().dim()].into();
|
||||
}
|
||||
|
||||
if state.all_rows.is_empty() && state.pagination.num_scanned_files == 0 {
|
||||
return vec!["No sessions yet".italic().dim()].into();
|
||||
}
|
||||
|
||||
if state.pagination.loading.is_pending() {
|
||||
return vec!["Loading older sessions…".italic().dim()].into();
|
||||
}
|
||||
|
||||
vec!["No sessions yet".italic().dim()].into()
|
||||
}
|
||||
|
||||
fn human_time_ago(ts: DateTime<Utc>) -> String {
|
||||
@@ -433,7 +778,13 @@ fn human_time_ago(ts: DateTime<Utc>) -> String {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
use serde_json::json;
|
||||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
fn head_with_ts_and_user_text(ts: &str, texts: &[&str]) -> Vec<serde_json::Value> {
|
||||
vec![
|
||||
@@ -449,6 +800,40 @@ mod tests {
|
||||
]
|
||||
}
|
||||
|
||||
fn make_item(path: &str, ts: &str, preview: &str) -> ConversationItem {
|
||||
ConversationItem {
|
||||
path: PathBuf::from(path),
|
||||
head: head_with_ts_and_user_text(ts, &[preview]),
|
||||
}
|
||||
}
|
||||
|
||||
fn cursor_from_str(repr: &str) -> Cursor {
|
||||
serde_json::from_str::<Cursor>(&format!("\"{repr}\""))
|
||||
.expect("cursor format should deserialize")
|
||||
}
|
||||
|
||||
    /// Convenience constructor for a `ConversationsPage` in tests.
    fn page(
        items: Vec<ConversationItem>,
        next_cursor: Option<Cursor>,
        num_scanned_files: usize,
        reached_scan_cap: bool,
    ) -> ConversationsPage {
        ConversationsPage {
            items,
            next_cursor,
            num_scanned_files,
            reached_scan_cap,
        }
    }
|
||||
|
||||
fn block_on_future<F: Future<Output = T>, T>(future: F) -> T {
|
||||
tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
.unwrap()
|
||||
.block_on(future)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn preview_uses_first_message_input_text() {
|
||||
let head = vec![
|
||||
@@ -473,7 +858,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_rows_preserves_backend_order() {
|
||||
fn rows_from_items_preserves_backend_order() {
|
||||
// Construct two items with different timestamps and real user text.
|
||||
let a = ConversationItem {
|
||||
path: PathBuf::from("/tmp/a.jsonl"),
|
||||
@@ -483,15 +868,283 @@ mod tests {
|
||||
path: PathBuf::from("/tmp/b.jsonl"),
|
||||
head: head_with_ts_and_user_text("2025-01-02T00:00:00Z", &["B"]),
|
||||
};
|
||||
let rows = to_rows(ConversationsPage {
|
||||
items: vec![a, b],
|
||||
next_cursor: None,
|
||||
num_scanned_files: 0,
|
||||
reached_scan_cap: false,
|
||||
});
|
||||
let rows = rows_from_items(vec![a, b]);
|
||||
assert_eq!(rows.len(), 2);
|
||||
// Preserve the given order; backend already provides newest-first
|
||||
// Preserve the given order even if timestamps differ; backend already provides newest-first.
|
||||
assert!(rows[0].preview.contains('A'));
|
||||
assert!(rows[1].preview.contains('B'));
|
||||
}
|
||||
|
||||
    /// Ingesting successive pages appends rows in arrival order and drops
    /// entries whose path was already seen on an earlier page.
    #[test]
    fn pageless_scrolling_deduplicates_and_keeps_order() {
        let loader: PageLoader = Arc::new(|_| {});
        let mut state =
            PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);

        state.reset_pagination();
        state.ingest_page(page(
            vec![
                make_item("/tmp/a.jsonl", "2025-01-03T00:00:00Z", "third"),
                make_item("/tmp/b.jsonl", "2025-01-02T00:00:00Z", "second"),
            ],
            Some(cursor_from_str(
                "2025-01-02T00-00-00|00000000-0000-0000-0000-000000000000",
            )),
            2,
            false,
        ));

        state.ingest_page(page(
            vec![
                // Same path as the first page — must be deduplicated.
                make_item("/tmp/a.jsonl", "2025-01-03T00:00:00Z", "duplicate"),
                make_item("/tmp/c.jsonl", "2025-01-01T00:00:00Z", "first"),
            ],
            Some(cursor_from_str(
                "2025-01-01T00-00-00|00000000-0000-0000-0000-000000000001",
            )),
            2,
            false,
        ));

        state.ingest_page(page(
            vec![make_item(
                "/tmp/d.jsonl",
                "2024-12-31T23:00:00Z",
                "very old",
            )],
            None,
            1,
            false,
        ));

        // Order is preserved and the duplicate's preview never appears.
        let previews: Vec<_> = state
            .filtered_rows
            .iter()
            .map(|row| row.preview.as_str())
            .collect();
        assert_eq!(previews, vec!["third", "second", "first", "very old"]);

        let unique_paths = state
            .filtered_rows
            .iter()
            .map(|row| row.path.clone())
            .collect::<std::collections::HashSet<_>>();
        assert_eq!(unique_paths.len(), 4);
    }
|
||||
|
||||
    /// When the loaded rows do not fill the viewport, a single non-search page
    /// load is requested.
    #[test]
    fn ensure_minimum_rows_prefetches_when_underfilled() {
        let recorded_requests: Arc<Mutex<Vec<PageLoadRequest>>> = Arc::new(Mutex::new(Vec::new()));
        let request_sink = recorded_requests.clone();
        // Loader that records requests instead of touching the filesystem.
        let loader: PageLoader = Arc::new(move |req: PageLoadRequest| {
            request_sink.lock().unwrap().push(req);
        });

        let mut state =
            PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);
        state.reset_pagination();
        state.ingest_page(page(
            vec![
                make_item("/tmp/a.jsonl", "2025-01-01T00:00:00Z", "one"),
                make_item("/tmp/b.jsonl", "2025-01-02T00:00:00Z", "two"),
            ],
            Some(cursor_from_str(
                "2025-01-03T00-00-00|00000000-0000-0000-0000-000000000000",
            )),
            2,
            false,
        ));

        // Ingesting alone must not trigger a load; only the viewport check does.
        assert!(recorded_requests.lock().unwrap().is_empty());
        state.ensure_minimum_rows_for_view(10);
        let guard = recorded_requests.lock().unwrap();
        assert_eq!(guard.len(), 1);
        assert!(guard[0].search_token.is_none());
    }
|
||||
|
||||
    /// PageDown/PageUp move the selection by exactly the measured viewport
    /// height.
    #[test]
    fn page_navigation_uses_view_rows() {
        let loader: PageLoader = Arc::new(|_| {});
        let mut state =
            PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);

        // Twenty rows so two PageDowns of five fit comfortably.
        let mut items = Vec::new();
        for idx in 0..20 {
            let ts = format!("2025-01-{:02}T00:00:00Z", idx + 1);
            let preview = format!("item-{idx}");
            let path = format!("/tmp/item-{idx}.jsonl");
            items.push(make_item(&path, &ts, &preview));
        }

        state.reset_pagination();
        state.ingest_page(page(items, None, 20, false));
        state.update_view_rows(5);

        assert_eq!(state.selected, 0);
        block_on_future(async {
            state
                .handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
                .await
                .unwrap();
        });
        assert_eq!(state.selected, 5);

        block_on_future(async {
            state
                .handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
                .await
                .unwrap();
        });
        assert_eq!(state.selected, 10);

        block_on_future(async {
            state
                .handle_key(KeyEvent::new(KeyCode::PageUp, KeyModifiers::NONE))
                .await
                .unwrap();
        });
        assert_eq!(state.selected, 5);
    }
|
||||
|
||||
    /// Moving the selection up within the already-visible window must not
    /// change the scroll position.
    #[test]
    fn up_at_bottom_does_not_scroll_when_visible() {
        let loader: PageLoader = Arc::new(|_| {});
        let mut state =
            PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);

        let mut items = Vec::new();
        for idx in 0..10 {
            let ts = format!("2025-02-{:02}T00:00:00Z", idx + 1);
            let preview = format!("item-{idx}");
            let path = format!("/tmp/item-{idx}.jsonl");
            items.push(make_item(&path, &ts, &preview));
        }

        state.reset_pagination();
        state.ingest_page(page(items, None, 10, false));
        state.update_view_rows(5);

        // Jump to the last row so the window is anchored at the bottom.
        state.selected = state.filtered_rows.len().saturating_sub(1);
        state.ensure_selected_visible();

        let initial_top = state.scroll_top;
        assert_eq!(initial_top, state.filtered_rows.len().saturating_sub(5));

        block_on_future(async {
            state
                .handle_key(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE))
                .await
                .unwrap();
        });

        // Selection moved up one row; the window did not scroll.
        assert_eq!(state.scroll_top, initial_top);
        assert_eq!(state.selected, state.filtered_rows.len().saturating_sub(2));
    }
|
||||
|
||||
    /// A query with no loaded matches keeps requesting pages until a match
    /// arrives; stale responses are ignored; hitting the scan cap ends the
    /// search.
    #[test]
    fn set_query_loads_until_match_and_respects_scan_cap() {
        let recorded_requests: Arc<Mutex<Vec<PageLoadRequest>>> = Arc::new(Mutex::new(Vec::new()));
        let request_sink = recorded_requests.clone();
        // Loader that records requests instead of touching the filesystem.
        let loader: PageLoader = Arc::new(move |req: PageLoadRequest| {
            request_sink.lock().unwrap().push(req);
        });

        let mut state =
            PickerState::new(PathBuf::from("/tmp"), FrameRequester::test_dummy(), loader);
        state.reset_pagination();
        state.ingest_page(page(
            vec![make_item(
                "/tmp/start.jsonl",
                "2025-01-01T00:00:00Z",
                "alpha",
            )],
            Some(cursor_from_str(
                "2025-01-02T00-00-00|00000000-0000-0000-0000-000000000000",
            )),
            1,
            false,
        ));
        recorded_requests.lock().unwrap().clear();

        // No loaded row matches "target", so a search load is issued.
        state.set_query("target".to_string());
        let first_request = {
            let guard = recorded_requests.lock().unwrap();
            assert_eq!(guard.len(), 1);
            guard[0].clone()
        };

        // First page has no match either; the search requests another page.
        state
            .handle_background_event(BackgroundEvent::PageLoaded {
                request_token: first_request.request_token,
                search_token: first_request.search_token,
                page: Ok(page(
                    vec![make_item("/tmp/beta.jsonl", "2025-01-02T00:00:00Z", "beta")],
                    Some(cursor_from_str(
                        "2025-01-03T00-00-00|00000000-0000-0000-0000-000000000001",
                    )),
                    5,
                    false,
                )),
            })
            .unwrap();

        let second_request = {
            let guard = recorded_requests.lock().unwrap();
            assert_eq!(guard.len(), 2);
            guard[1].clone()
        };
        assert!(state.search_state.is_active());
        assert!(state.filtered_rows.is_empty());

        // Second page contains a match — the search stops.
        state
            .handle_background_event(BackgroundEvent::PageLoaded {
                request_token: second_request.request_token,
                search_token: second_request.search_token,
                page: Ok(page(
                    vec![make_item(
                        "/tmp/match.jsonl",
                        "2025-01-03T00:00:00Z",
                        "target log",
                    )],
                    Some(cursor_from_str(
                        "2025-01-04T00-00-00|00000000-0000-0000-0000-000000000002",
                    )),
                    7,
                    false,
                )),
            })
            .unwrap();

        assert!(!state.filtered_rows.is_empty());
        assert!(!state.search_state.is_active());

        // New query with no match starts a fresh search request.
        recorded_requests.lock().unwrap().clear();
        state.set_query("missing".to_string());
        let active_request = {
            let guard = recorded_requests.lock().unwrap();
            assert_eq!(guard.len(), 1);
            guard[0].clone()
        };

        // Replaying the old (stale) request token must be ignored: no new load.
        state
            .handle_background_event(BackgroundEvent::PageLoaded {
                request_token: second_request.request_token,
                search_token: second_request.search_token,
                page: Ok(page(Vec::new(), None, 0, false)),
            })
            .unwrap();
        assert_eq!(recorded_requests.lock().unwrap().len(), 1);

        // The current request reports the scan cap was reached — search ends.
        state
            .handle_background_event(BackgroundEvent::PageLoaded {
                request_token: active_request.request_token,
                search_token: active_request.search_token,
                page: Ok(page(Vec::new(), None, 3, true)),
            })
            .unwrap();

        assert!(state.filtered_rows.is_empty());
        assert!(!state.search_state.is_active());
        assert!(state.pagination.reached_scan_cap);
    }
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user