Moving token_info to ConversationHistory (#5581)

I want to centralize input processing and management in
`ConversationHistory`. That requires `ConversationHistory` to have
access to `token_info` (e.g. so it can refuse to add an oversized input
to the history). It also makes more sense for `token_info` to live on
`ConversationHistory` than on `state`.
Authored by Ahmed Ibrahim on 2025-10-23 20:30:58 -07:00, committed by GitHub
parent e258f0f044
commit 0f4fd33ddd
2 changed files with 39 additions and 17 deletions
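
To make the intent above concrete, here is a minimal sketch of the kind of gating this enables, using simplified stand-in types. `try_add_input` and the token estimate are hypothetical and not part of this commit or of `codex_protocol`:

// Simplified stand-ins for illustration only; the real types live in codex_protocol.
struct TokenUsageInfo {
    total_tokens: i64,
    model_context_window: Option<i64>,
}

struct ConversationHistory {
    items: Vec<String>,
    token_info: Option<TokenUsageInfo>,
}

impl ConversationHistory {
    /// Hypothetical guard: refuse an input when the tracked usage says the
    /// remaining context window cannot hold it.
    fn try_add_input(&mut self, input: String, estimated_tokens: i64) -> Result<(), String> {
        if let Some(info) = &self.token_info {
            if let Some(window) = info.model_context_window {
                if info.total_tokens + estimated_tokens > window {
                    return Err("input would not fit in the model context window".to_string());
                }
            }
        }
        self.items.push(input);
        Ok(())
    }
}

fn main() {
    let mut history = ConversationHistory {
        items: Vec::new(),
        token_info: Some(TokenUsageInfo {
            total_tokens: 7_500,
            model_context_window: Some(8_000),
        }),
    };
    // A huge paste is rejected up front instead of silently overflowing the window.
    assert!(history.try_add_input("huge paste".into(), 2_000).is_err());
    assert!(history.try_add_input("short follow-up".into(), 200).is_ok());
}

The actual change below only adds the `token_info` plumbing to `ConversationHistory`; any such gating would build on it later.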

@@ -1,5 +1,7 @@
 use codex_protocol::models::FunctionCallOutputPayload;
 use codex_protocol::models::ResponseItem;
+use codex_protocol::protocol::TokenUsage;
+use codex_protocol::protocol::TokenUsageInfo;
 use tracing::error;

 /// Transcript of conversation history
@@ -7,11 +9,28 @@ use tracing::error;
 pub(crate) struct ConversationHistory {
     /// The oldest items are at the beginning of the vector.
     items: Vec<ResponseItem>,
+    token_info: Option<TokenUsageInfo>,
 }

 impl ConversationHistory {
     pub(crate) fn new() -> Self {
-        Self { items: Vec::new() }
+        Self {
+            items: Vec::new(),
+            token_info: TokenUsageInfo::new_or_append(&None, &None, None),
+        }
     }

+    pub(crate) fn token_info(&self) -> Option<TokenUsageInfo> {
+        self.token_info.clone()
+    }
+
+    pub(crate) fn set_token_usage_full(&mut self, context_window: i64) {
+        match &mut self.token_info {
+            Some(info) => info.fill_to_context_window(context_window),
+            None => {
+                self.token_info = Some(TokenUsageInfo::full_context_window(context_window));
+            }
+        }
+    }
+
     /// `items` is ordered from oldest to newest.
@@ -301,6 +320,18 @@ impl ConversationHistory {
             self.items.remove(pos);
         }
     }
+
+    pub(crate) fn update_token_info(
+        &mut self,
+        usage: &TokenUsage,
+        model_context_window: Option<i64>,
+    ) {
+        self.token_info = TokenUsageInfo::new_or_append(
+            &self.token_info,
+            &Some(usage.clone()),
+            model_context_window,
+        );
+    }
 }

 #[inline]
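
For context on the calls above: `token_info` starts from `new_or_append` with no usage and is folded forward on every turn via `update_token_info`. A rough sketch of that accumulate-or-initialize pattern, with simplified local types rather than the real `codex_protocol` definitions (whose fields and exact semantics may differ):

// Simplified stand-ins; not the real codex_protocol types.
#[derive(Clone, Default)]
struct TokenUsage {
    total_tokens: i64,
}

#[derive(Clone, Default)]
struct TokenUsageInfo {
    total_tokens: i64,
    model_context_window: Option<i64>,
}

impl TokenUsageInfo {
    /// Start tracking on the first reported usage, append to the running
    /// total on later ones; with no usage there is nothing to track yet.
    fn new_or_append(
        current: &Option<TokenUsageInfo>,
        usage: &Option<TokenUsage>,
        model_context_window: Option<i64>,
    ) -> Option<TokenUsageInfo> {
        let usage = usage.as_ref()?;
        let mut info = current.clone().unwrap_or_default();
        info.total_tokens += usage.total_tokens;
        if model_context_window.is_some() {
            info.model_context_window = model_context_window;
        }
        Some(info)
    }
}

fn main() {
    // As in `new()` above, begin with no usage reported; in this sketch that means None.
    let mut info = TokenUsageInfo::new_or_append(&None, &None, None);
    assert!(info.is_none());

    // After each model turn, fold the latest usage into the running total.
    for turn_total in [1_200, 900] {
        info = TokenUsageInfo::new_or_append(
            &info,
            &Some(TokenUsage { total_tokens: turn_total }),
            Some(128_000),
        );
    }
    assert_eq!(info.map(|i| i.total_tokens), Some(2_100));
}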

@@ -12,7 +12,6 @@ use crate::protocol::TokenUsageInfo;
 pub(crate) struct SessionState {
     pub(crate) session_configuration: SessionConfiguration,
     pub(crate) history: ConversationHistory,
-    pub(crate) token_info: Option<TokenUsageInfo>,
     pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
 }
@@ -22,7 +21,6 @@ impl SessionState {
         Self {
             session_configuration,
             history: ConversationHistory::new(),
-            token_info: None,
             latest_rate_limits: None,
         }
     }
@@ -54,11 +52,11 @@ impl SessionState {
         usage: &TokenUsage,
         model_context_window: Option<i64>,
     ) {
-        self.token_info = TokenUsageInfo::new_or_append(
-            &self.token_info,
-            &Some(usage.clone()),
-            model_context_window,
-        );
+        self.history.update_token_info(usage, model_context_window);
     }

+    pub(crate) fn token_info(&self) -> Option<TokenUsageInfo> {
+        self.history.token_info()
+    }
+
     pub(crate) fn set_rate_limits(&mut self, snapshot: RateLimitSnapshot) {
@@ -68,17 +66,10 @@ impl SessionState {
     pub(crate) fn token_info_and_rate_limits(
         &self,
     ) -> (Option<TokenUsageInfo>, Option<RateLimitSnapshot>) {
-        (self.token_info.clone(), self.latest_rate_limits.clone())
+        (self.token_info(), self.latest_rate_limits.clone())
     }

     pub(crate) fn set_token_usage_full(&mut self, context_window: i64) {
-        match &mut self.token_info {
-            Some(info) => info.fill_to_context_window(context_window),
-            None => {
-                self.token_info = Some(TokenUsageInfo::full_context_window(context_window));
-            }
-        }
+        self.history.set_token_usage_full(context_window);
     }

     // Pending input/approval moved to TurnState.
 }