Reland "refactor transcript view to handle HistoryCells" (#3753)

Reland of #3538
This commit is contained in:
Jeremy Rose
2025-09-18 13:55:53 -07:00
committed by GitHub
parent 71038381aa
commit b34e906396
13 changed files with 477 additions and 405 deletions

View File

@@ -130,7 +130,7 @@ use codex_protocol::models::ResponseItem;
use codex_protocol::models::ShellToolCallParams;
use codex_protocol::protocol::InitialHistory;
mod compact;
pub mod compact;
use self::compact::build_compacted_history;
use self::compact::collect_user_messages;
@@ -710,7 +710,7 @@ impl Session {
self.persist_rollout_items(&rollout_items).await;
}
fn build_initial_context(&self, turn_context: &TurnContext) -> Vec<ResponseItem> {
pub(crate) fn build_initial_context(&self, turn_context: &TurnContext) -> Vec<ResponseItem> {
let mut items = Vec::<ResponseItem>::with_capacity(2);
if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
items.push(UserInstructions::new(user_instructions.to_string()).into());
@@ -3325,6 +3325,9 @@ async fn exit_review_mode(
.await;
}
#[cfg(test)]
pub(crate) use tests::make_session_and_context;
#[cfg(test)]
mod tests {
use super::*;
@@ -3565,7 +3568,7 @@ mod tests {
})
}
fn make_session_and_context() -> (Session, TurnContext) {
pub(crate) fn make_session_and_context() -> (Session, TurnContext) {
let (tx_event, _rx_event) = async_channel::unbounded();
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = Config::load_from_base_config_with_overrides(

View File

@@ -203,7 +203,7 @@ async fn run_compact_task_inner(
sess.send_event(event).await;
}
fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
pub fn content_items_to_text(content: &[ContentItem]) -> Option<String> {
let mut pieces = Vec::new();
for item in content {
match item {
@@ -235,7 +235,7 @@ pub(crate) fn collect_user_messages(items: &[ResponseItem]) -> Vec<String> {
.collect()
}
fn is_session_prefix_message(text: &str) -> bool {
pub fn is_session_prefix_message(text: &str) -> bool {
matches!(
InputMessageKind::from(("user", text)),
InputMessageKind::UserInstructions | InputMessageKind::EnvironmentContext

View File

@@ -3,6 +3,8 @@ use crate::CodexAuth;
use crate::codex::Codex;
use crate::codex::CodexSpawnOk;
use crate::codex::INITIAL_SUBMIT_ID;
use crate::codex::compact::content_items_to_text;
use crate::codex::compact::is_session_prefix_message;
use crate::codex_conversation::CodexConversation;
use crate::config::Config;
use crate::error::CodexErr;
@@ -134,19 +136,19 @@ impl ConversationManager {
self.conversations.write().await.remove(conversation_id)
}
/// Fork an existing conversation by dropping the last `drop_last_messages`
/// user/assistant messages from its transcript and starting a new
/// Fork an existing conversation by taking messages up to the given position
/// (not including the message at the given position) and starting a new
/// conversation with identical configuration (unless overridden by the
/// caller's `config`). The new conversation will have a fresh id.
pub async fn fork_conversation(
&self,
num_messages_to_drop: usize,
nth_user_message: usize,
config: Config,
path: PathBuf,
) -> CodexResult<NewConversation> {
// Compute the prefix up to the cut point.
let history = RolloutRecorder::get_rollout_history(&path).await?;
let history = truncate_after_dropping_last_messages(history, num_messages_to_drop);
let history = truncate_before_nth_user_message(history, nth_user_message);
// Spawn a new conversation with the computed initial history.
let auth_manager = self.auth_manager.clone();
@@ -159,33 +161,30 @@ impl ConversationManager {
}
}
/// Return a prefix of `items` obtained by dropping the last `n` user messages
/// and all items that follow them.
fn truncate_after_dropping_last_messages(history: InitialHistory, n: usize) -> InitialHistory {
if n == 0 {
return InitialHistory::Forked(history.get_rollout_items());
}
// Work directly on rollout items, and cut the vector at the nth-from-last user message input.
/// Return a prefix of `items` obtained by cutting strictly before the nth user message
/// (0-based) and all items that follow it.
fn truncate_before_nth_user_message(history: InitialHistory, n: usize) -> InitialHistory {
// Work directly on rollout items, and cut the vector at the nth user message input.
let items: Vec<RolloutItem> = history.get_rollout_items();
// Find indices of user message inputs in rollout order.
let mut user_positions: Vec<usize> = Vec::new();
for (idx, item) in items.iter().enumerate() {
if let RolloutItem::ResponseItem(ResponseItem::Message { role, .. }) = item
if let RolloutItem::ResponseItem(ResponseItem::Message { role, content, .. }) = item
&& role == "user"
&& content_items_to_text(content).is_some_and(|text| !is_session_prefix_message(&text))
{
user_positions.push(idx);
}
}
// If fewer than n user messages exist, treat as empty.
if user_positions.len() < n {
// If fewer than or equal to n user messages exist, treat as empty (out of range).
if user_positions.len() <= n {
return InitialHistory::New;
}
// Cut strictly before the nth-from-last user message (do not keep the nth itself).
let cut_idx = user_positions[user_positions.len() - n];
// Cut strictly before the nth user message (do not keep the nth itself).
let cut_idx = user_positions[n];
let rolled: Vec<RolloutItem> = items.into_iter().take(cut_idx).collect();
if rolled.is_empty() {
@@ -198,9 +197,11 @@ fn truncate_after_dropping_last_messages(history: InitialHistory, n: usize) -> I
#[cfg(test)]
mod tests {
use super::*;
use crate::codex::make_session_and_context;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ReasoningItemReasoningSummary;
use codex_protocol::models::ResponseItem;
use pretty_assertions::assert_eq;
fn user_msg(text: &str) -> ResponseItem {
ResponseItem::Message {
@@ -252,7 +253,7 @@ mod tests {
.cloned()
.map(RolloutItem::ResponseItem)
.collect();
let truncated = truncate_after_dropping_last_messages(InitialHistory::Forked(initial), 1);
let truncated = truncate_before_nth_user_message(InitialHistory::Forked(initial), 1);
let got_items = truncated.get_rollout_items();
let expected_items = vec![
RolloutItem::ResponseItem(items[0].clone()),
@@ -269,7 +270,37 @@ mod tests {
.cloned()
.map(RolloutItem::ResponseItem)
.collect();
let truncated2 = truncate_after_dropping_last_messages(InitialHistory::Forked(initial2), 2);
let truncated2 = truncate_before_nth_user_message(InitialHistory::Forked(initial2), 2);
assert!(matches!(truncated2, InitialHistory::New));
}
#[test]
fn ignores_session_prefix_messages_when_truncating() {
let (session, turn_context) = make_session_and_context();
let mut items = session.build_initial_context(&turn_context);
items.push(user_msg("feature request"));
items.push(assistant_msg("ack"));
items.push(user_msg("second question"));
items.push(assistant_msg("answer"));
let rollout_items: Vec<RolloutItem> = items
.iter()
.cloned()
.map(RolloutItem::ResponseItem)
.collect();
let truncated = truncate_before_nth_user_message(InitialHistory::Forked(rollout_items), 1);
let got_items = truncated.get_rollout_items();
let expected: Vec<RolloutItem> = vec![
RolloutItem::ResponseItem(items[0].clone()),
RolloutItem::ResponseItem(items[1].clone()),
RolloutItem::ResponseItem(items[2].clone()),
];
assert_eq!(
serde_json::to_value(&got_items).unwrap(),
serde_json::to_value(&expected).unwrap()
);
}
}

View File

@@ -92,6 +92,8 @@ pub use client_common::Prompt;
pub use client_common::REVIEW_PROMPT;
pub use client_common::ResponseEvent;
pub use client_common::ResponseStream;
pub use codex::compact::content_items_to_text;
pub use codex::compact::is_session_prefix_message;
pub use codex_protocol::models::ContentItem;
pub use codex_protocol::models::LocalShellAction;
pub use codex_protocol::models::LocalShellExecAction;

View File

@@ -74,7 +74,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
"compact+resume test expects resumed path {resumed_path:?} to exist",
);
let forked = fork_conversation(&manager, &config, resumed_path, 1).await;
let forked = fork_conversation(&manager, &config, resumed_path, 2).await;
user_turn(&forked, "AFTER_FORK").await;
// 3. Capture the requests to the model and validate the history slices.
@@ -100,17 +100,15 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
"after-resume input should have at least as many items as after-compact",
);
assert_eq!(compact_arr.as_slice(), &resume_arr[..compact_arr.len()]);
eprint!(
"len of compact: {}, len of fork: {}",
compact_arr.len(),
fork_arr.len()
);
eprintln!("input_after_fork:{}", json!(input_after_fork));
assert!(
compact_arr.len() <= fork_arr.len(),
"after-fork input should have at least as many items as after-compact",
);
assert_eq!(compact_arr.as_slice(), &fork_arr[..compact_arr.len()]);
assert_eq!(
&compact_arr.as_slice()[..compact_arr.len()],
&fork_arr[..compact_arr.len()]
);
let prompt = requests[0]["instructions"]
.as_str()
@@ -824,14 +822,15 @@ async fn resume_conversation(
conversation
}
#[cfg(test)]
async fn fork_conversation(
manager: &ConversationManager,
config: &Config,
path: std::path::PathBuf,
back_steps: usize,
nth_user_message: usize,
) -> Arc<CodexConversation> {
let NewConversation { conversation, .. } = manager
.fork_conversation(back_steps, config.clone(), path)
.fork_conversation(nth_user_message, config.clone(), path)
.await
.expect("fork conversation");
conversation

View File

@@ -5,6 +5,8 @@ use codex_core::ModelProviderInfo;
use codex_core::NewConversation;
use codex_core::ResponseItem;
use codex_core::built_in_model_providers;
use codex_core::content_items_to_text;
use codex_core::is_session_prefix_message;
use codex_core::protocol::ConversationPathResponseEvent;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
@@ -104,13 +106,16 @@ async fn fork_conversation_twice_drops_to_first_message() {
items
};
// Compute expected prefixes after each fork by truncating base rollout at nth-from-last user input.
// Compute expected prefixes after each fork by truncating base rollout
// strictly before the nth user input (0-based).
let base_items = read_items(&base_path);
let find_user_input_positions = |items: &[RolloutItem]| -> Vec<usize> {
let mut pos = Vec::new();
for (i, it) in items.iter().enumerate() {
if let RolloutItem::ResponseItem(ResponseItem::Message { role, content, .. }) = it
&& role == "user"
&& content_items_to_text(content)
.is_some_and(|text| !is_session_prefix_message(&text))
{
// Consider any user message as an input boundary; recorder stores both EventMsg and ResponseItem.
// We specifically look for input items, which are represented as ContentItem::InputText.
@@ -126,11 +131,8 @@ async fn fork_conversation_twice_drops_to_first_message() {
};
let user_inputs = find_user_input_positions(&base_items);
// After dropping last user input (n=1), cut strictly before that input if present, else empty.
let cut1 = user_inputs
.get(user_inputs.len().saturating_sub(1))
.copied()
.unwrap_or(0);
// After cutting at nth user input (n=1 → second user message), cut strictly before that input.
let cut1 = user_inputs.get(1).copied().unwrap_or(0);
let expected_after_first: Vec<RolloutItem> = base_items[..cut1].to_vec();
// After dropping again (n=1 on fork1), compute expected relative to fork1's rollout.
@@ -161,12 +163,12 @@ async fn fork_conversation_twice_drops_to_first_message() {
serde_json::to_value(&expected_after_first).unwrap()
);
// Fork again with n=1 → drops the (new) last user message, leaving only the first.
// Fork again with n=0 → drops the (new) last user message, leaving only the first.
let NewConversation {
conversation: codex_fork2,
..
} = conversation_manager
.fork_conversation(1, config_for_fork.clone(), fork1_path.clone())
.fork_conversation(0, config_for_fork.clone(), fork1_path.clone())
.await
.expect("fork 2");

View File

@@ -3,6 +3,7 @@ use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::chatwidget::ChatWidget;
use crate::file_search::FileSearchManager;
use crate::history_cell::HistoryCell;
use crate::pager_overlay::Overlay;
use crate::resume_picker::ResumeSelection;
use crate::tui;
@@ -52,7 +53,7 @@ pub(crate) struct App {
pub(crate) file_search: FileSearchManager,
pub(crate) transcript_lines: Vec<Line<'static>>,
pub(crate) transcript_cells: Vec<Arc<dyn HistoryCell>>,
// Pager overlay state (Transcript or Static like Diff)
pub(crate) overlay: Option<Overlay>,
@@ -138,7 +139,7 @@ impl App {
active_profile,
file_search,
enhanced_keys_supported,
transcript_lines: Vec::new(),
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
has_emitted_history_lines: false,
@@ -225,15 +226,12 @@ impl App {
tui.frame_requester().schedule_frame();
}
AppEvent::InsertHistoryCell(cell) => {
let mut cell_transcript = cell.transcript_lines();
if !cell.is_stream_continuation() && !self.transcript_lines.is_empty() {
cell_transcript.insert(0, Line::from(""));
}
let cell: Arc<dyn HistoryCell> = cell.into();
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_lines(cell_transcript.clone());
t.insert_cell(cell.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_lines.extend(cell_transcript.clone());
self.transcript_cells.push(cell.clone());
let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
// Only insert a separating blank line for new cells that are not
@@ -380,7 +378,7 @@ impl App {
} => {
// Enter alternate screen and set viewport to full size.
let _ = tui.enter_alt_screen();
self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone()));
self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone()));
tui.frame_requester().schedule_frame();
}
// Esc primes/advances backtracking only in normal (not working) mode
@@ -405,7 +403,7 @@ impl App {
kind: KeyEventKind::Press,
..
} if self.backtrack.primed
&& self.backtrack.count > 0
&& self.backtrack.nth_user_message != usize::MAX
&& self.chat_widget.composer_is_empty() =>
{
// Delegate to helper for clarity; preserves behavior.
@@ -439,7 +437,6 @@ mod tests {
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ConversationManager;
use ratatui::text::Line;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -462,7 +459,7 @@ mod tests {
config,
active_profile: None,
file_search,
transcript_lines: Vec::<Line<'static>>::new(),
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
has_emitted_history_lines: false,

View File

@@ -1,7 +1,8 @@
use std::path::PathBuf;
use std::sync::Arc;
use crate::app::App;
use crate::backtrack_helpers;
use crate::history_cell::UserHistoryCell;
use crate::pager_overlay::Overlay;
use crate::tui;
use crate::tui::TuiEvent;
@@ -19,11 +20,11 @@ pub(crate) struct BacktrackState {
pub(crate) primed: bool,
/// Session id of the base conversation to fork from.
pub(crate) base_id: Option<ConversationId>,
/// Current step count (Nth last user message).
pub(crate) count: usize,
/// Index in the transcript of the last user message.
pub(crate) nth_user_message: usize,
/// True when the transcript overlay is showing a backtrack preview.
pub(crate) overlay_preview_active: bool,
/// Pending fork request: (base_id, drop_count, prefill).
/// Pending fork request: (base_id, nth_user_message, prefill).
pub(crate) pending: Option<(ConversationId, usize, String)>,
}
@@ -96,9 +97,9 @@ impl App {
&mut self,
prefill: String,
base_id: ConversationId,
drop_last_messages: usize,
nth_user_message: usize,
) {
self.backtrack.pending = Some((base_id, drop_last_messages, prefill));
self.backtrack.pending = Some((base_id, nth_user_message, prefill));
self.app_event_tx.send(crate::app_event::AppEvent::CodexOp(
codex_core::protocol::Op::GetPath,
));
@@ -107,7 +108,7 @@ impl App {
/// Open transcript overlay (enters alternate screen and shows full transcript).
pub(crate) fn open_transcript_overlay(&mut self, tui: &mut tui::Tui) {
let _ = tui.enter_alt_screen();
self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone()));
self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone()));
tui.frame_requester().schedule_frame();
}
@@ -130,15 +131,17 @@ impl App {
/// Re-render the full transcript into the terminal scrollback in one call.
/// Useful when switching sessions to ensure prior history remains visible.
pub(crate) fn render_transcript_once(&mut self, tui: &mut tui::Tui) {
if !self.transcript_lines.is_empty() {
tui.insert_history_lines(self.transcript_lines.clone());
if !self.transcript_cells.is_empty() {
for cell in &self.transcript_cells {
tui.insert_history_lines(cell.transcript_lines());
}
}
}
/// Initialize backtrack state and show composer hint.
fn prime_backtrack(&mut self) {
self.backtrack.primed = true;
self.backtrack.count = 0;
self.backtrack.nth_user_message = usize::MAX;
self.backtrack.base_id = self.chat_widget.conversation_id();
self.chat_widget.show_esc_backtrack_hint();
}
@@ -157,51 +160,44 @@ impl App {
self.backtrack.primed = true;
self.backtrack.base_id = self.chat_widget.conversation_id();
self.backtrack.overlay_preview_active = true;
let sel = self.compute_backtrack_selection(tui, 1);
self.apply_backtrack_selection(sel);
let last_user_cell_position = self
.transcript_cells
.iter()
.filter_map(|c| c.as_any().downcast_ref::<UserHistoryCell>())
.count() as i64
- 1;
if last_user_cell_position >= 0 {
self.apply_backtrack_selection(last_user_cell_position as usize);
}
tui.frame_requester().schedule_frame();
}
/// Step selection to the next older user message and update overlay.
fn step_backtrack_and_highlight(&mut self, tui: &mut tui::Tui) {
let next = self.backtrack.count.saturating_add(1);
let sel = self.compute_backtrack_selection(tui, next);
self.apply_backtrack_selection(sel);
let last_user_cell_position = self
.transcript_cells
.iter()
.filter(|c| c.as_any().is::<UserHistoryCell>())
.take(self.backtrack.nth_user_message)
.count()
.saturating_sub(1);
self.apply_backtrack_selection(last_user_cell_position);
tui.frame_requester().schedule_frame();
}
/// Compute normalized target, scroll offset, and highlight for requested step.
fn compute_backtrack_selection(
&self,
tui: &tui::Tui,
requested_n: usize,
) -> (usize, Option<usize>, Option<(usize, usize)>) {
let nth = backtrack_helpers::normalize_backtrack_n(&self.transcript_lines, requested_n);
let header_idx =
backtrack_helpers::find_nth_last_user_header_index(&self.transcript_lines, nth);
let offset = header_idx.map(|idx| {
backtrack_helpers::wrapped_offset_before(
&self.transcript_lines,
idx,
tui.terminal.viewport_area.width,
)
});
let hl = backtrack_helpers::highlight_range_for_nth_last_user(&self.transcript_lines, nth);
(nth, offset, hl)
}
/// Apply a computed backtrack selection to the overlay and internal counter.
fn apply_backtrack_selection(
&mut self,
selection: (usize, Option<usize>, Option<(usize, usize)>),
) {
let (nth, offset, hl) = selection;
self.backtrack.count = nth;
fn apply_backtrack_selection(&mut self, nth_user_message: usize) {
self.backtrack.nth_user_message = nth_user_message;
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
if let Some(off) = offset {
t.set_scroll_offset(off);
let cell = self
.transcript_cells
.iter()
.enumerate()
.filter(|(_, c)| c.as_any().is::<UserHistoryCell>())
.nth(nth_user_message);
if let Some((idx, _)) = cell {
t.set_highlight_cell(Some(idx));
}
t.set_highlight_range(hl);
}
}
@@ -219,13 +215,19 @@ impl App {
/// Handle Enter in overlay backtrack preview: confirm selection and reset state.
fn overlay_confirm_backtrack(&mut self, tui: &mut tui::Tui) {
let nth_user_message = self.backtrack.nth_user_message;
if let Some(base_id) = self.backtrack.base_id {
let drop_last_messages = self.backtrack.count;
let prefill =
backtrack_helpers::nth_last_user_text(&self.transcript_lines, drop_last_messages)
.unwrap_or_default();
let user_cells = self
.transcript_cells
.iter()
.filter_map(|c| c.as_any().downcast_ref::<UserHistoryCell>())
.collect::<Vec<_>>();
let prefill = user_cells
.get(nth_user_message)
.map(|c| c.message.clone())
.unwrap_or_default();
self.close_transcript_overlay(tui);
self.request_backtrack(prefill, base_id, drop_last_messages);
self.request_backtrack(prefill, base_id, nth_user_message);
}
self.reset_backtrack_state();
}
@@ -244,11 +246,15 @@ impl App {
/// Computes the prefill from the selected user message and requests history.
pub(crate) fn confirm_backtrack_from_main(&mut self) {
if let Some(base_id) = self.backtrack.base_id {
let drop_last_messages = self.backtrack.count;
let prefill =
backtrack_helpers::nth_last_user_text(&self.transcript_lines, drop_last_messages)
.unwrap_or_default();
self.request_backtrack(prefill, base_id, drop_last_messages);
let prefill = self
.transcript_cells
.iter()
.filter(|c| c.as_any().is::<UserHistoryCell>())
.nth(self.backtrack.nth_user_message)
.and_then(|c| c.as_any().downcast_ref::<UserHistoryCell>())
.map(|c| c.message.clone())
.unwrap_or_default();
self.request_backtrack(prefill, base_id, self.backtrack.nth_user_message);
}
self.reset_backtrack_state();
}
@@ -257,7 +263,7 @@ impl App {
pub(crate) fn reset_backtrack_state(&mut self) {
self.backtrack.primed = false;
self.backtrack.base_id = None;
self.backtrack.count = 0;
self.backtrack.nth_user_message = usize::MAX;
// In case a hint is somehow still visible (e.g., race with overlay open/close).
self.chat_widget.clear_esc_backtrack_hint();
}
@@ -271,9 +277,9 @@ impl App {
) -> Result<()> {
if let Some((base_id, _, _)) = self.backtrack.pending.as_ref()
&& ev.conversation_id == *base_id
&& let Some((_, drop_count, prefill)) = self.backtrack.pending.take()
&& let Some((_, nth_user_message, prefill)) = self.backtrack.pending.take()
{
self.fork_and_switch_to_new_conversation(tui, ev, drop_count, prefill)
self.fork_and_switch_to_new_conversation(tui, ev, nth_user_message, prefill)
.await;
}
Ok(())
@@ -284,17 +290,17 @@ impl App {
&mut self,
tui: &mut tui::Tui,
ev: ConversationPathResponseEvent,
drop_count: usize,
nth_user_message: usize,
prefill: String,
) {
let cfg = self.chat_widget.config_ref().clone();
// Perform the fork via a thin wrapper for clarity/testability.
let result = self
.perform_fork(ev.path.clone(), drop_count, cfg.clone())
.perform_fork(ev.path.clone(), nth_user_message, cfg.clone())
.await;
match result {
Ok(new_conv) => {
self.install_forked_conversation(tui, cfg, new_conv, drop_count, &prefill)
self.install_forked_conversation(tui, cfg, new_conv, nth_user_message, &prefill)
}
Err(e) => tracing::error!("error forking conversation: {e:#}"),
}
@@ -304,10 +310,12 @@ impl App {
async fn perform_fork(
&self,
path: PathBuf,
drop_count: usize,
nth_user_message: usize,
cfg: codex_core::config::Config,
) -> codex_core::error::Result<codex_core::NewConversation> {
self.server.fork_conversation(drop_count, cfg, path).await
self.server
.fork_conversation(nth_user_message, cfg, path)
.await
}
/// Install a forked conversation into the ChatWidget and update UI to reflect selection.
@@ -316,7 +324,7 @@ impl App {
tui: &mut tui::Tui,
cfg: codex_core::config::Config,
new_conv: codex_core::NewConversation,
drop_count: usize,
nth_user_message: usize,
prefill: &str,
) {
let conv = new_conv.conversation;
@@ -333,7 +341,7 @@ impl App {
self.chat_widget =
crate::chatwidget::ChatWidget::new_from_existing(init, conv, session_configured);
// Trim transcript up to the selected user message and re-render it.
self.trim_transcript_for_backtrack(drop_count);
self.trim_transcript_for_backtrack(nth_user_message);
self.render_transcript_once(tui);
if !prefill.is_empty() {
self.chat_widget.set_composer_text(prefill.to_string());
@@ -341,14 +349,129 @@ impl App {
tui.frame_requester().schedule_frame();
}
/// Trim transcript_lines to preserve only content up to the selected user message.
fn trim_transcript_for_backtrack(&mut self, drop_count: usize) {
if let Some(cut_idx) =
backtrack_helpers::find_nth_last_user_header_index(&self.transcript_lines, drop_count)
{
self.transcript_lines.truncate(cut_idx);
} else {
self.transcript_lines.clear();
}
/// Trim transcript_cells to preserve only content up to the selected user message.
///
/// `nth_user_message` is a 0-based index over user cells; `usize::MAX` is the
/// "no selection" sentinel and leaves the transcript untouched (handled by the
/// free function below).
fn trim_transcript_for_backtrack(&mut self, nth_user_message: usize) {
    // Delegates to a free function so the trimming logic is unit-testable
    // without constructing an `App`.
    trim_transcript_cells_to_nth_user(&mut self.transcript_cells, nth_user_message);
}
}
/// Drop every transcript cell from the selected user message onward.
///
/// `nth_user_message` is a 0-based index counted over `UserHistoryCell`s only.
/// `usize::MAX` is the "no selection" sentinel and leaves the cells untouched.
/// If fewer user cells exist than the requested index, nothing is removed.
fn trim_transcript_cells_to_nth_user(
    transcript_cells: &mut Vec<Arc<dyn crate::history_cell::HistoryCell>>,
    nth_user_message: usize,
) {
    if nth_user_message == usize::MAX {
        return;
    }
    // Walk forward counting user cells; the nth one marks the cut point.
    // When there is no nth user cell, keep everything (cut at len()).
    let mut seen_user_cells = 0usize;
    let mut cut_idx = transcript_cells.len();
    for (idx, cell) in transcript_cells.iter().enumerate() {
        if cell.as_any().is::<UserHistoryCell>() {
            if seen_user_cells == nth_user_message {
                cut_idx = idx;
                break;
            }
            seen_user_cells += 1;
        }
    }
    transcript_cells.truncate(cut_idx);
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::history_cell::AgentMessageCell;
    use crate::history_cell::HistoryCell;
    use ratatui::prelude::Line;
    use std::sync::Arc;
    // Selecting the 0th user message trims it and everything after it,
    // leaving nothing when the transcript starts with that user cell.
    #[test]
    fn trim_transcript_for_first_user_drops_user_and_newer_cells() {
        let mut cells: Vec<Arc<dyn HistoryCell>> = vec![
            Arc::new(UserHistoryCell {
                message: "first user".to_string(),
            }) as Arc<dyn HistoryCell>,
            Arc::new(AgentMessageCell::new(vec![Line::from("assistant")], true))
                as Arc<dyn HistoryCell>,
        ];
        trim_transcript_cells_to_nth_user(&mut cells, 0);
        assert!(cells.is_empty());
    }
    // Cells that precede the selected user message survive the trim.
    #[test]
    fn trim_transcript_preserves_cells_before_selected_user() {
        let mut cells: Vec<Arc<dyn HistoryCell>> = vec![
            Arc::new(AgentMessageCell::new(vec![Line::from("intro")], true))
                as Arc<dyn HistoryCell>,
            Arc::new(UserHistoryCell {
                message: "first".to_string(),
            }) as Arc<dyn HistoryCell>,
            Arc::new(AgentMessageCell::new(vec![Line::from("after")], false))
                as Arc<dyn HistoryCell>,
        ];
        trim_transcript_cells_to_nth_user(&mut cells, 0);
        assert_eq!(cells.len(), 1);
        let agent = cells[0]
            .as_any()
            .downcast_ref::<AgentMessageCell>()
            .expect("agent cell");
        let agent_lines = agent.display_lines(u16::MAX);
        assert_eq!(agent_lines.len(), 1);
        // Flatten the line's spans to compare the rendered text.
        let intro_text: String = agent_lines[0]
            .spans
            .iter()
            .map(|span| span.content.as_ref())
            .collect();
        assert_eq!(intro_text, "> intro");
    }
    // Selecting a later user message (index 1) keeps all earlier history:
    // the intro, the first user message, and the agent reply between them.
    #[test]
    fn trim_transcript_for_later_user_keeps_prior_history() {
        let mut cells: Vec<Arc<dyn HistoryCell>> = vec![
            Arc::new(AgentMessageCell::new(vec![Line::from("intro")], true))
                as Arc<dyn HistoryCell>,
            Arc::new(UserHistoryCell {
                message: "first".to_string(),
            }) as Arc<dyn HistoryCell>,
            Arc::new(AgentMessageCell::new(vec![Line::from("between")], false))
                as Arc<dyn HistoryCell>,
            Arc::new(UserHistoryCell {
                message: "second".to_string(),
            }) as Arc<dyn HistoryCell>,
            Arc::new(AgentMessageCell::new(vec![Line::from("tail")], false))
                as Arc<dyn HistoryCell>,
        ];
        trim_transcript_cells_to_nth_user(&mut cells, 1);
        assert_eq!(cells.len(), 3);
        let agent_intro = cells[0]
            .as_any()
            .downcast_ref::<AgentMessageCell>()
            .expect("intro agent");
        let intro_lines = agent_intro.display_lines(u16::MAX);
        let intro_text: String = intro_lines[0]
            .spans
            .iter()
            .map(|span| span.content.as_ref())
            .collect();
        assert_eq!(intro_text, "> intro");
        let user_first = cells[1]
            .as_any()
            .downcast_ref::<UserHistoryCell>()
            .expect("first user");
        assert_eq!(user_first.message, "first");
        let agent_between = cells[2]
            .as_any()
            .downcast_ref::<AgentMessageCell>()
            .expect("between agent");
        let between_lines = agent_between.display_lines(u16::MAX);
        let between_text: String = between_lines[0]
            .spans
            .iter()
            .map(|span| span.content.as_ref())
            .collect();
        assert_eq!(between_text, " between");
    }
}

View File

@@ -1,153 +0,0 @@
use ratatui::text::Line;
/// Convenience: compute the highlight range for the Nth last user message.
///
/// Returns `None` when there is no Nth-last user header; otherwise the
/// `(start, end)` line range of that message block.
pub(crate) fn highlight_range_for_nth_last_user(
    lines: &[Line<'_>],
    n: usize,
) -> Option<(usize, usize)> {
    // Combinator form of the `?`-based flow: locate the header line,
    // then expand it into the full message range.
    find_nth_last_user_header_index(lines, n)
        .map(|header_idx| highlight_range_from_header(lines, header_idx))
}
/// Compute the wrapped display-line offset before `header_idx`, for a given width.
///
/// Counts how many display rows the transcript lines preceding `header_idx`
/// occupy once word-wrapped to `width` columns.
pub(crate) fn wrapped_offset_before(lines: &[Line<'_>], header_idx: usize, width: u16) -> usize {
    let preceding = &lines[..header_idx];
    crate::wrapping::word_wrap_lines(preceding, usize::from(width)).len()
}
/// Find the header index for the Nth last user message in the transcript.
/// Returns `None` if `n == 0` or there are fewer than `n` user messages.
pub(crate) fn find_nth_last_user_header_index(lines: &[Line<'_>], n: usize) -> Option<usize> {
    // `n` is 1-based; zero means "no selection".
    if n == 0 {
        return None;
    }
    // Scan the transcript bottom-up, keeping only lines whose full text is
    // exactly "user" (a message header), and take the nth such match.
    lines
        .iter()
        .enumerate()
        .rev()
        .filter(|(_, line)| {
            let content: String = line.spans.iter().map(|s| s.content.as_ref()).collect();
            content.trim() == "user"
        })
        .nth(n - 1)
        .map(|(idx, _)| idx)
}
/// Normalize a requested backtrack step `n` against the available user messages.
/// - Returns `0` if there are no user messages.
/// - Returns `n` if the Nth last user message exists.
/// - Otherwise wraps to `1` (the most recent user message).
pub(crate) fn normalize_backtrack_n(lines: &[Line<'_>], n: usize) -> usize {
    // Expression-style cascade; each arm mirrors one bullet in the doc above.
    if n == 0 {
        0
    } else if find_nth_last_user_header_index(lines, n).is_some() {
        n
    } else if find_nth_last_user_header_index(lines, 1).is_some() {
        // Stepped past the oldest message: wrap around to the most recent one.
        1
    } else {
        // No user messages at all.
        0
    }
}
/// Extract the text content of the Nth last user message.
/// The message body is considered to be the lines following the "user" header
/// until the first blank line.
pub(crate) fn nth_last_user_text(lines: &[Line<'_>], n: usize) -> Option<String> {
    // Chain the two lookups instead of `?`: no header means no text.
    find_nth_last_user_header_index(lines, n)
        .and_then(|header_idx| extract_message_text_after_header(lines, header_idx))
}
/// Extract message text starting after `header_idx` until the first blank line.
///
/// Returns `None` when the header is immediately followed by a blank line or
/// the end of the transcript (i.e. the message has no body).
fn extract_message_text_after_header(lines: &[Line<'_>], header_idx: usize) -> Option<String> {
    // Take body lines until the first blank one (all spans empty after
    // trimming), flattening each line's spans into a single string.
    let body: Vec<String> = lines[header_idx + 1..]
        .iter()
        .take_while(|line| {
            !line
                .spans
                .iter()
                .all(|s| s.content.as_ref().trim().is_empty())
        })
        .map(|line| line.spans.iter().map(|s| s.content.as_ref()).collect())
        .collect();
    if body.is_empty() {
        None
    } else {
        Some(body.join("\n"))
    }
}
/// Given a header index, return the inclusive range for the message block
/// [header_idx, end) where end is the first blank line after the header or the
/// end of the transcript.
fn highlight_range_from_header(lines: &[Line<'_>], header_idx: usize) -> (usize, usize) {
    // Scan forward from the line after the header for the first blank line
    // (all spans empty after trimming); fall back to the transcript length.
    let end = lines
        .iter()
        .enumerate()
        .skip(header_idx + 1)
        .find(|(_, line)| {
            line.spans
                .iter()
                .all(|s| s.content.as_ref().trim().is_empty())
        })
        .map(|(idx, _)| idx)
        .unwrap_or(lines.len());
    (header_idx, end)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Helper: build a single owned transcript line from a &str.
    fn line(s: &str) -> Line<'static> {
        s.to_string().into()
    }
    fn transcript_with_users(count: usize) -> Vec<Line<'static>> {
        // Build a transcript with `count` user messages, each followed by one body line and a blank line.
        let mut v = Vec::new();
        for i in 0..count {
            v.push(line("user"));
            v.push(line(&format!("message {i}")));
            v.push(line(""));
        }
        v
    }
    #[test]
    fn normalize_wraps_to_one_when_past_oldest() {
        let lines = transcript_with_users(2);
        // In-range requests are returned unchanged.
        assert_eq!(normalize_backtrack_n(&lines, 1), 1);
        assert_eq!(normalize_backtrack_n(&lines, 2), 2);
        // Requesting 3rd when only 2 exist wraps to 1
        assert_eq!(normalize_backtrack_n(&lines, 3), 1);
    }
    #[test]
    fn normalize_returns_zero_when_no_user_messages() {
        // With no user headers present, any request normalizes to 0.
        let lines = transcript_with_users(0);
        assert_eq!(normalize_backtrack_n(&lines, 1), 0);
        assert_eq!(normalize_backtrack_n(&lines, 5), 0);
    }
    #[test]
    fn normalize_keeps_valid_n() {
        let lines = transcript_with_users(3);
        assert_eq!(normalize_backtrack_n(&lines, 2), 2);
    }
}

View File

@@ -44,6 +44,7 @@ use ratatui::style::Stylize;
use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use std::any::Any;
use std::collections::HashMap;
use std::io::Cursor;
use std::path::Path;
@@ -70,7 +71,7 @@ pub(crate) enum PatchEventType {
/// Represents an event to display in the conversation history. Returns its
/// `Vec<Line<'static>>` representation to make it easier to display in a
/// scrollable list.
pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync {
pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync + Any {
fn display_lines(&self, width: u16) -> Vec<Line<'static>>;
fn transcript_lines(&self) -> Vec<Line<'static>> {
@@ -90,9 +91,15 @@ pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync {
}
}
impl dyn HistoryCell {
pub(crate) fn as_any(&self) -> &dyn Any {
self
}
}
#[derive(Debug)]
pub(crate) struct UserHistoryCell {
message: String,
pub message: String,
}
impl HistoryCell for UserHistoryCell {

View File

@@ -34,7 +34,6 @@ mod app_backtrack;
mod app_event;
mod app_event_sender;
mod ascii_animation;
mod backtrack_helpers;
mod bottom_pane;
mod chatwidget;
mod citation_regex;

View File

@@ -1,6 +1,8 @@
use std::io::Result;
use std::sync::Arc;
use std::time::Duration;
use crate::history_cell::HistoryCell;
use crate::render::line_utils::push_owned_lines;
use crate::tui;
use crate::tui::TuiEvent;
@@ -15,6 +17,7 @@ use ratatui::style::Styled;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::text::Text;
use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
@@ -24,8 +27,8 @@ pub(crate) enum Overlay {
}
impl Overlay {
pub(crate) fn new_transcript(lines: Vec<Line<'static>>) -> Self {
Self::Transcript(TranscriptOverlay::new(lines))
pub(crate) fn new_transcript(cells: Vec<Arc<dyn HistoryCell>>) -> Self {
Self::Transcript(TranscriptOverlay::new(cells))
}
pub(crate) fn new_static_with_title(lines: Vec<Line<'static>>, title: String) -> Self {
@@ -73,21 +76,24 @@ fn render_key_hints(area: Rect, buf: &mut Buffer, pairs: &[(&str, &str)]) {
/// Generic widget for rendering a pager view.
struct PagerView {
lines: Vec<Line<'static>>,
texts: Vec<Text<'static>>,
scroll_offset: usize,
title: String,
wrap_cache: Option<WrapCache>,
last_content_height: Option<usize>,
/// If set, on next render ensure this chunk is visible.
pending_scroll_chunk: Option<usize>,
}
impl PagerView {
fn new(lines: Vec<Line<'static>>, title: String, scroll_offset: usize) -> Self {
fn new(texts: Vec<Text<'static>>, title: String, scroll_offset: usize) -> Self {
Self {
lines,
texts,
scroll_offset,
title,
wrap_cache: None,
last_content_height: None,
pending_scroll_chunk: None,
}
}
@@ -96,6 +102,14 @@ impl PagerView {
let content_area = self.scroll_area(area);
self.update_last_content_height(content_area.height);
self.ensure_wrapped(content_area.width);
// If there is a pending request to scroll a specific chunk into view,
// satisfy it now that wrapping is up to date for this width.
if let (Some(idx), Some(cache)) =
(self.pending_scroll_chunk.take(), self.wrap_cache.as_ref())
&& let Some(range) = cache.chunk_ranges.get(idx).cloned()
{
self.ensure_range_visible(range, content_area.height as usize, cache.wrapped.len());
}
// Compute page bounds without holding an immutable borrow on cache while mutating self
let wrapped_len = self
.wrap_cache
@@ -108,40 +122,12 @@ impl PagerView {
let start = self.scroll_offset;
let end = (start + content_area.height as usize).min(wrapped_len);
let (wrapped, _src_idx) = self.cached();
let wrapped = self.cached();
let page = &wrapped[start..end];
self.render_content_page_prepared(content_area, buf, page);
self.render_bottom_bar(area, content_area, buf, wrapped);
}
fn render_with_highlight(
&mut self,
area: Rect,
buf: &mut Buffer,
highlight: Option<(usize, usize)>,
) {
self.render_header(area, buf);
let content_area = self.scroll_area(area);
self.update_last_content_height(content_area.height);
self.ensure_wrapped(content_area.width);
// Compute page bounds first to avoid borrow conflicts
let wrapped_len = self
.wrap_cache
.as_ref()
.map(|c| c.wrapped.len())
.unwrap_or(0);
self.scroll_offset = self
.scroll_offset
.min(wrapped_len.saturating_sub(content_area.height as usize));
let start = self.scroll_offset;
let end = (start + content_area.height as usize).min(wrapped_len);
let (wrapped, src_idx) = self.cached();
let page = self.page_with_optional_highlight(wrapped, src_idx, start, end, highlight);
self.render_content_page_prepared(content_area, buf, &page);
self.render_bottom_bar(area, content_area, buf, wrapped);
}
fn render_header(&self, area: Rect, buf: &mut Buffer) {
Span::from("/ ".repeat(area.width as usize / 2))
.dim()
@@ -270,7 +256,8 @@ impl PagerView {
struct WrapCache {
width: u16,
wrapped: Vec<Line<'static>>,
src_idx: Vec<usize>,
/// For each input Text chunk, the inclusive-excluded range of wrapped lines produced.
chunk_ranges: Vec<std::ops::Range<usize>>,
base_len: usize,
}
@@ -278,74 +265,39 @@ impl PagerView {
fn ensure_wrapped(&mut self, width: u16) {
let width = width.max(1);
let needs = match self.wrap_cache {
Some(ref c) => c.width != width || c.base_len != self.lines.len(),
Some(ref c) => c.width != width || c.base_len != self.texts.len(),
None => true,
};
if !needs {
return;
}
let mut wrapped: Vec<Line<'static>> = Vec::new();
let mut src_idx: Vec<usize> = Vec::new();
for (i, line) in self.lines.iter().enumerate() {
let ws = crate::wrapping::word_wrap_line(line, width as usize);
src_idx.extend(std::iter::repeat_n(i, ws.len()));
push_owned_lines(&ws, &mut wrapped);
let mut chunk_ranges: Vec<std::ops::Range<usize>> = Vec::with_capacity(self.texts.len());
for text in &self.texts {
let start = wrapped.len();
for line in &text.lines {
let ws = crate::wrapping::word_wrap_line(line, width as usize);
push_owned_lines(&ws, &mut wrapped);
}
let end = wrapped.len();
chunk_ranges.push(start..end);
}
self.wrap_cache = Some(WrapCache {
width,
wrapped,
src_idx,
base_len: self.lines.len(),
chunk_ranges,
base_len: self.texts.len(),
});
}
fn cached(&self) -> (&[Line<'static>], &[usize]) {
fn cached(&self) -> &[Line<'static>] {
if let Some(cache) = self.wrap_cache.as_ref() {
(&cache.wrapped, &cache.src_idx)
&cache.wrapped
} else {
(&[], &[])
&[]
}
}
fn page_with_optional_highlight<'a>(
&self,
wrapped: &'a [Line<'static>],
src_idx: &[usize],
start: usize,
end: usize,
highlight: Option<(usize, usize)>,
) -> std::borrow::Cow<'a, [Line<'static>]> {
use ratatui::style::Modifier;
let (hi_start, hi_end) = match highlight {
Some(r) => r,
None => return std::borrow::Cow::Borrowed(&wrapped[start..end]),
};
let mut out: Vec<Line<'static>> = Vec::with_capacity(end - start);
let mut bold_done = false;
for (row, src_line) in wrapped
.iter()
.enumerate()
.skip(start)
.take(end.saturating_sub(start))
{
let mut line = src_line.clone();
if let Some(src) = src_idx.get(row).copied()
&& src >= hi_start
&& src < hi_end
{
for (i, s) in line.spans.iter_mut().enumerate() {
s.style.add_modifier |= Modifier::REVERSED;
if !bold_done && i == 0 {
s.style.add_modifier |= Modifier::BOLD;
bold_done = true;
}
}
}
out.push(line);
}
std::borrow::Cow::Owned(out)
}
fn is_scrolled_to_bottom(&self) -> bool {
if self.scroll_offset == usize::MAX {
return true;
@@ -363,38 +315,108 @@ impl PagerView {
let max_scroll = cache.wrapped.len().saturating_sub(visible);
self.scroll_offset >= max_scroll
}
/// Request that the given text chunk index be scrolled into view on next render.
fn scroll_chunk_into_view(&mut self, chunk_index: usize) {
self.pending_scroll_chunk = Some(chunk_index);
}
fn ensure_range_visible(
&mut self,
range: std::ops::Range<usize>,
viewport_height: usize,
total_wrapped: usize,
) {
if viewport_height == 0 || total_wrapped == 0 {
return;
}
let first = range.start.min(total_wrapped.saturating_sub(1));
let last = range
.end
.saturating_sub(1)
.min(total_wrapped.saturating_sub(1));
let current_top = self.scroll_offset.min(total_wrapped.saturating_sub(1));
let current_bottom = current_top.saturating_add(viewport_height.saturating_sub(1));
if first < current_top {
self.scroll_offset = first;
} else if last > current_bottom {
// Scroll just enough so that 'last' is visible at the bottom
self.scroll_offset = last.saturating_sub(viewport_height.saturating_sub(1));
}
}
}
pub(crate) struct TranscriptOverlay {
view: PagerView,
highlight_range: Option<(usize, usize)>,
cells: Vec<Arc<dyn HistoryCell>>,
highlight_cell: Option<usize>,
is_done: bool,
}
impl TranscriptOverlay {
pub(crate) fn new(transcript_lines: Vec<Line<'static>>) -> Self {
pub(crate) fn new(transcript_cells: Vec<Arc<dyn HistoryCell>>) -> Self {
Self {
view: PagerView::new(
transcript_lines,
Self::render_cells_to_texts(&transcript_cells, None),
"T R A N S C R I P T".to_string(),
usize::MAX,
),
highlight_range: None,
cells: transcript_cells,
highlight_cell: None,
is_done: false,
}
}
pub(crate) fn insert_lines(&mut self, lines: Vec<Line<'static>>) {
fn render_cells_to_texts(
cells: &[Arc<dyn HistoryCell>],
highlight_cell: Option<usize>,
) -> Vec<Text<'static>> {
let mut texts: Vec<Text<'static>> = Vec::new();
let mut first = true;
for (idx, cell) in cells.iter().enumerate() {
let mut lines: Vec<Line<'static>> = Vec::new();
if !cell.is_stream_continuation() && !first {
lines.push(Line::from(""));
}
let cell_lines = if Some(idx) == highlight_cell {
cell.transcript_lines()
.into_iter()
.map(|l| l.reversed())
.collect()
} else {
cell.transcript_lines()
};
lines.extend(cell_lines);
texts.push(Text::from(lines));
first = false;
}
texts
}
pub(crate) fn insert_cell(&mut self, cell: Arc<dyn HistoryCell>) {
let follow_bottom = self.view.is_scrolled_to_bottom();
self.view.lines.extend(lines);
// Append as a new Text chunk (with a separating blank if needed)
let mut lines: Vec<Line<'static>> = Vec::new();
if !cell.is_stream_continuation() && !self.cells.is_empty() {
lines.push(Line::from(""));
}
lines.extend(cell.transcript_lines());
self.view.texts.push(Text::from(lines));
self.cells.push(cell);
self.view.wrap_cache = None;
if follow_bottom {
self.view.scroll_offset = usize::MAX;
}
}
pub(crate) fn set_highlight_range(&mut self, range: Option<(usize, usize)>) {
self.highlight_range = range;
pub(crate) fn set_highlight_cell(&mut self, cell: Option<usize>) {
self.highlight_cell = cell;
self.view.wrap_cache = None;
self.view.texts = Self::render_cells_to_texts(&self.cells, self.highlight_cell);
if let Some(idx) = self.highlight_cell {
self.view.scroll_chunk_into_view(idx);
}
}
fn render_hints(&self, area: Rect, buf: &mut Buffer) {
@@ -402,9 +424,7 @@ impl TranscriptOverlay {
let line2 = Rect::new(area.x, area.y.saturating_add(1), area.width, 1);
render_key_hints(line1, buf, PAGER_KEY_HINTS);
let mut pairs: Vec<(&str, &str)> = vec![("q", "quit"), ("Esc", "edit prev")];
if let Some((start, end)) = self.highlight_range
&& end > start
{
if self.highlight_cell.is_some() {
pairs.push(("", "edit message"));
}
render_key_hints(line2, buf, &pairs);
@@ -414,8 +434,7 @@ impl TranscriptOverlay {
let top_h = area.height.saturating_sub(3);
let top = Rect::new(area.x, area.y, area.width, top_h);
let bottom = Rect::new(area.x, area.y + top_h, area.width, 3);
self.view
.render_with_highlight(top, buf, self.highlight_range);
self.view.render(top, buf);
self.render_hints(bottom, buf);
}
}
@@ -458,9 +477,6 @@ impl TranscriptOverlay {
pub(crate) fn is_done(&self) -> bool {
self.is_done
}
pub(crate) fn set_scroll_offset(&mut self, offset: usize) {
self.view.scroll_offset = offset;
}
}
pub(crate) struct StaticOverlay {
@@ -471,7 +487,7 @@ pub(crate) struct StaticOverlay {
impl StaticOverlay {
pub(crate) fn with_title(lines: Vec<Line<'static>>, title: String) -> Self {
Self {
view: PagerView::new(lines, title, 0),
view: PagerView::new(vec![Text::from(lines)], title, 0),
is_done: false,
}
}
@@ -534,9 +550,26 @@ mod tests {
use ratatui::Terminal;
use ratatui::backend::TestBackend;
#[derive(Debug)]
struct TestCell {
lines: Vec<Line<'static>>,
}
impl crate::history_cell::HistoryCell for TestCell {
fn display_lines(&self, _width: u16) -> Vec<Line<'static>> {
self.lines.clone()
}
fn transcript_lines(&self) -> Vec<Line<'static>> {
self.lines.clone()
}
}
#[test]
fn edit_prev_hint_is_visible() {
let mut overlay = TranscriptOverlay::new(vec![Line::from("hello")]);
let mut overlay = TranscriptOverlay::new(vec![Arc::new(TestCell {
lines: vec![Line::from("hello")],
})]);
// Render into a small buffer and assert the backtrack hint is present
let area = Rect::new(0, 0, 40, 10);
@@ -561,9 +594,15 @@ mod tests {
fn transcript_overlay_snapshot_basic() {
// Prepare a transcript overlay with a few lines
let mut overlay = TranscriptOverlay::new(vec![
Line::from("alpha"),
Line::from("beta"),
Line::from("gamma"),
Arc::new(TestCell {
lines: vec![Line::from("alpha")],
}),
Arc::new(TestCell {
lines: vec![Line::from("beta")],
}),
Arc::new(TestCell {
lines: vec![Line::from("gamma")],
}),
]);
let mut term = Terminal::new(TestBackend::new(40, 10)).expect("term");
term.draw(|f| overlay.render(f.area(), f.buffer_mut()))
@@ -573,8 +612,15 @@ mod tests {
#[test]
fn transcript_overlay_keeps_scroll_pinned_at_bottom() {
let mut overlay =
TranscriptOverlay::new((0..20).map(|i| Line::from(format!("line{i}"))).collect());
let mut overlay = TranscriptOverlay::new(
(0..20)
.map(|i| {
Arc::new(TestCell {
lines: vec![Line::from(format!("line{i}"))],
}) as Arc<dyn HistoryCell>
})
.collect(),
);
let mut term = Terminal::new(TestBackend::new(40, 12)).expect("term");
term.draw(|f| overlay.render(f.area(), f.buffer_mut()))
.expect("draw");
@@ -584,22 +630,33 @@ mod tests {
"expected initial render to leave view at bottom"
);
overlay.insert_lines(vec!["tail".into()]);
overlay.insert_cell(Arc::new(TestCell {
lines: vec!["tail".into()],
}));
assert_eq!(overlay.view.scroll_offset, usize::MAX);
}
#[test]
fn transcript_overlay_preserves_manual_scroll_position() {
let mut overlay =
TranscriptOverlay::new((0..20).map(|i| Line::from(format!("line{i}"))).collect());
let mut overlay = TranscriptOverlay::new(
(0..20)
.map(|i| {
Arc::new(TestCell {
lines: vec![Line::from(format!("line{i}"))],
}) as Arc<dyn HistoryCell>
})
.collect(),
);
let mut term = Terminal::new(TestBackend::new(40, 12)).expect("term");
term.draw(|f| overlay.render(f.area(), f.buffer_mut()))
.expect("draw");
overlay.view.scroll_offset = 0;
overlay.insert_lines(vec!["tail".into()]);
overlay.insert_cell(Arc::new(TestCell {
lines: vec!["tail".into()],
}));
assert_eq!(overlay.view.scroll_offset, 0);
}
@@ -620,17 +677,21 @@ mod tests {
#[test]
fn pager_wrap_cache_reuses_for_same_width_and_rebuilds_on_change() {
let long = "This is a long line that should wrap multiple times to ensure non-empty wrapped output.";
let mut pv = PagerView::new(vec![long.into(), long.into()], "T".to_string(), 0);
let mut pv = PagerView::new(
vec![Text::from(vec![long.into()]), Text::from(vec![long.into()])],
"T".to_string(),
0,
);
// Build cache at width 24
pv.ensure_wrapped(24);
let (w1, _) = pv.cached();
let w1 = pv.cached();
assert!(!w1.is_empty(), "expected wrapped output to be non-empty");
let ptr1 = w1.as_ptr();
// Re-run with same width: cache should be reused (pointer stability heuristic)
pv.ensure_wrapped(24);
let (w2, _) = pv.cached();
let w2 = pv.cached();
let ptr2 = w2.as_ptr();
assert_eq!(ptr1, ptr2, "cache should not rebuild for unchanged width");
@@ -638,7 +699,7 @@ mod tests {
// Drop immutable borrow before mutating
let prev_len = w2.len();
pv.ensure_wrapped(36);
let (w3, _) = pv.cached();
let w3 = pv.cached();
assert_ne!(
prev_len,
w3.len(),
@@ -649,15 +710,16 @@ mod tests {
#[test]
fn pager_wrap_cache_invalidates_on_append() {
let long = "Another long line for wrapping behavior verification.";
let mut pv = PagerView::new(vec![long.into()], "T".to_string(), 0);
let mut pv = PagerView::new(vec![Text::from(vec![long.into()])], "T".to_string(), 0);
pv.ensure_wrapped(28);
let (w1, _) = pv.cached();
let w1 = pv.cached();
let len1 = w1.len();
// Append new lines should cause ensure_wrapped to rebuild due to len change
pv.lines.extend([long.into(), long.into()]);
pv.texts.push(Text::from(vec![long.into()]));
pv.texts.push(Text::from(vec![long.into()]));
pv.ensure_wrapped(28);
let (w2, _) = pv.cached();
let w2 = pv.cached();
assert!(
w2.len() >= len1,
"wrapped length should grow or stay same after append"

View File

@@ -4,10 +4,10 @@ expression: term.backend()
---
"/ T R A N S C R I P T / / / / / / / / / "
"alpha "
" "
"beta "
" "
"gamma "
"~ "
"~ "
"───────────────────────────────── 100% ─"
" ↑/↓ scroll PgUp/PgDn page Home/End "
" q quit Esc edit prev "