Move token usage/context information to session level (#3221)

Move context information into the main loop so it can be used to
interrupt the loop or start auto-compaction.
Authored by pakrym-oai on 2025-09-06 08:19:23 -07:00; committed by GitHub
parent 70a6d4b1b4
commit 0269096229
9 changed files with 151 additions and 117 deletions
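
For orientation, here is a minimal sketch of what the session-level bookkeeping looks like conceptually: per-turn TokenUsage values are folded into one TokenUsageInfo owned by the session, which is what EventMsg::TokenCount now carries to the TUI. The struct bodies, the `last_token_usage` field, and the `append_last_usage`/`merge_optional` names below are illustrative assumptions, not the real `codex_core::protocol` definitions; the diff only confirms `TokenUsageInfo::total_token_usage` and the `TokenUsage` fields.

// Illustrative sketch only (not the actual codex_core::protocol types):
// per-turn token counts are folded into a session-owned TokenUsageInfo that
// the main loop can consult to interrupt a turn or trigger auto-compaction.

#[derive(Clone, Debug, Default)]
pub struct TokenUsage {
    pub input_tokens: u64,
    pub cached_input_tokens: Option<u64>,
    pub output_tokens: u64,
    pub reasoning_output_tokens: Option<u64>,
    pub total_tokens: u64,
}

#[derive(Clone, Debug, Default)]
pub struct TokenUsageInfo {
    pub total_token_usage: TokenUsage,
    pub last_token_usage: TokenUsage, // assumed field name
}

impl TokenUsageInfo {
    /// Fold one turn's usage into the running session totals (assumed method name).
    pub fn append_last_usage(&mut self, usage: TokenUsage) {
        let total = &mut self.total_token_usage;
        total.input_tokens += usage.input_tokens;
        total.output_tokens += usage.output_tokens;
        total.total_tokens += usage.total_tokens;
        // Optional counters stay None until at least one side reports a value.
        total.cached_input_tokens =
            merge_optional(total.cached_input_tokens, usage.cached_input_tokens);
        total.reasoning_output_tokens =
            merge_optional(total.reasoning_output_tokens, usage.reasoning_output_tokens);
        self.last_token_usage = usage;
    }
}

fn merge_optional(current: Option<u64>, new: Option<u64>) -> Option<u64> {
    match (current, new) {
        (Some(c), Some(n)) => Some(c + n),
        (Some(c), None) => Some(c),
        (None, Some(n)) => Some(n),
        (None, None) => None,
    }
}

The Option merging mirrors the add_token_usage helper that this commit deletes from the TUI (last hunk below).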

@@ -29,6 +29,7 @@ use codex_core::protocol::PatchApplyBeginEvent;
 use codex_core::protocol::StreamErrorEvent;
 use codex_core::protocol::TaskCompleteEvent;
 use codex_core::protocol::TokenUsage;
+use codex_core::protocol::TokenUsageInfo;
 use codex_core::protocol::TurnAbortReason;
 use codex_core::protocol::TurnDiffEvent;
 use codex_core::protocol::UserMessageEvent;
@@ -109,8 +110,7 @@ pub(crate) struct ChatWidget {
     active_exec_cell: Option<ExecCell>,
     config: Config,
     initial_user_message: Option<UserMessage>,
-    total_token_usage: TokenUsage,
-    last_token_usage: TokenUsage,
+    token_info: Option<TokenUsageInfo>,
     // Stream lifecycle controller
     stream: StreamController,
     running_commands: HashMap<String, RunningCommand>,
@@ -259,16 +259,10 @@ impl ChatWidget {
         self.maybe_send_next_queued_input();
     }
-    fn on_token_count(&mut self, token_usage: TokenUsage) {
-        self.total_token_usage = add_token_usage(&self.total_token_usage, &token_usage);
-        self.last_token_usage = token_usage;
-        self.bottom_pane.set_token_usage(
-            self.total_token_usage.clone(),
-            self.last_token_usage.clone(),
-            self.config.model_context_window,
-        );
+    pub(crate) fn set_token_info(&mut self, info: Option<TokenUsageInfo>) {
+        self.bottom_pane.set_token_usage(info.clone());
+        self.token_info = info;
     }
     /// Finalize any active exec as failed, push an error message into history,
     /// and stop/clear running UI state.
     fn finalize_turn_with_error_message(&mut self, message: String) {
@@ -659,8 +653,7 @@ impl ChatWidget {
                initial_prompt.unwrap_or_default(),
                initial_images,
            ),
-            total_token_usage: TokenUsage::default(),
-            last_token_usage: TokenUsage::default(),
+            token_info: None,
            stream: StreamController::new(config),
            running_commands: HashMap::new(),
            task_complete_pending: false,
@@ -712,8 +705,7 @@ impl ChatWidget {
                initial_prompt.unwrap_or_default(),
                initial_images,
            ),
-            total_token_usage: TokenUsage::default(),
-            last_token_usage: TokenUsage::default(),
+            token_info: None,
            stream: StreamController::new(config),
            running_commands: HashMap::new(),
            task_complete_pending: false,
@@ -1050,7 +1042,7 @@ impl ChatWidget {
            EventMsg::AgentReasoningSectionBreak(_) => self.on_reasoning_section_break(),
            EventMsg::TaskStarted(_) => self.on_task_started(),
            EventMsg::TaskComplete(TaskCompleteEvent { .. }) => self.on_task_complete(),
-            EventMsg::TokenCount(token_usage) => self.on_token_count(token_usage),
+            EventMsg::TokenCount(ev) => self.set_token_info(ev.info),
            EventMsg::Error(ErrorEvent { message }) => self.on_error(message),
            EventMsg::TurnAborted(ev) => match ev.reason {
                TurnAbortReason::Interrupted => {
@@ -1157,9 +1149,16 @@ impl ChatWidget {
     }
     pub(crate) fn add_status_output(&mut self) {
+        let default_usage;
+        let usage_ref = if let Some(ti) = &self.token_info {
+            &ti.total_token_usage
+        } else {
+            default_usage = TokenUsage::default();
+            &default_usage
+        };
         self.add_to_history(history_cell::new_status_output(
             &self.config,
-            &self.total_token_usage,
+            usage_ref,
             &self.session_id,
         ));
     }
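
A note on the `default_usage` binding added above: it is declared without an initializer so that the reference produced in the `else` branch (`&default_usage`) outlives the `if let` expression; borrowing a temporary `TokenUsage::default()` directly inside the branch would not live long enough. A standalone illustration of the same deferred-initialization pattern (the names here are made up):

fn main() {
    let configured: Option<String> = None;

    // Deferred initialization: `fallback` is assigned only in the branch
    // that needs it, and the reference taken there outlives the `if`/`else`.
    let fallback;
    let greeting: &String = if let Some(g) = &configured {
        g
    } else {
        fallback = String::from("hello");
        &fallback
    };

    println!("{greeting}");
}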
@@ -1352,8 +1351,11 @@ impl ChatWidget {
         self.submit_user_message(text.into());
     }
-    pub(crate) fn token_usage(&self) -> &TokenUsage {
-        &self.total_token_usage
+    pub(crate) fn token_usage(&self) -> TokenUsage {
+        self.token_info
+            .as_ref()
+            .map(|ti| ti.total_token_usage.clone())
+            .unwrap_or_default()
     }
     pub(crate) fn session_id(&self) -> Option<Uuid> {
@@ -1367,12 +1369,8 @@ impl ChatWidget {
     }
     pub(crate) fn clear_token_usage(&mut self) {
-        self.total_token_usage = TokenUsage::default();
-        self.bottom_pane.set_token_usage(
-            self.total_token_usage.clone(),
-            self.last_token_usage.clone(),
-            self.config.model_context_window,
-        );
+        self.token_info = None;
+        self.bottom_pane.set_token_usage(None);
     }
     pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
@@ -1405,34 +1403,6 @@ const EXAMPLE_PROMPTS: [&str; 6] = [
     "Improve documentation in @filename",
 ];
-fn add_token_usage(current_usage: &TokenUsage, new_usage: &TokenUsage) -> TokenUsage {
-    let cached_input_tokens = match (
-        current_usage.cached_input_tokens,
-        new_usage.cached_input_tokens,
-    ) {
-        (Some(current), Some(new)) => Some(current + new),
-        (Some(current), None) => Some(current),
-        (None, Some(new)) => Some(new),
-        (None, None) => None,
-    };
-    let reasoning_output_tokens = match (
-        current_usage.reasoning_output_tokens,
-        new_usage.reasoning_output_tokens,
-    ) {
-        (Some(current), Some(new)) => Some(current + new),
-        (Some(current), None) => Some(current),
-        (None, Some(new)) => Some(new),
-        (None, None) => None,
-    };
-    TokenUsage {
-        input_tokens: current_usage.input_tokens + new_usage.input_tokens,
-        cached_input_tokens,
-        output_tokens: current_usage.output_tokens + new_usage.output_tokens,
-        reasoning_output_tokens,
-        total_tokens: current_usage.total_tokens + new_usage.total_tokens,
-    }
-}
 // Extract the first bold (Markdown) element in the form **...** from `s`.
 // Returns the inner text if found; otherwise `None`.
 fn extract_first_bold(s: &str) -> Option<String> {