Phase 1: Repository & Infrastructure Setup

- Renamed directories: codex-rs -> llmx-rs, codex-cli -> llmx-cli
- Updated package.json files:
  - Root: llmx-monorepo
  - CLI: @llmx/llmx
  - SDK: @llmx/llmx-sdk
- Updated pnpm workspace configuration
- Renamed binary: codex.js -> llmx.js
- Updated environment variables: CODEX_* -> LLMX_*
- Changed repository URLs to valknar/llmx

🤖 Generated with Claude Code
Author: Sebastian Krüger
Date:   2025-11-11 14:01:52 +01:00
Commit: f237fe560d (parent 052b052832)
1151 changed files with 41 additions and 35 deletions
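
The environment-variable change above is a straight prefix rename (CODEX_* -> LLMX_*). As a hypothetical sketch (not part of this commit), a lookup after the rename might read the new variable, with a fallback to the old name shown purely for illustration:

use std::env;
use std::path::PathBuf;

// Hypothetical helper (not from this diff): resolve the config home after the
// CODEX_* -> LLMX_* rename; the fallback to the old variable is an assumption
// for illustration, not something Phase 1 claims to provide.
fn llmx_home() -> Option<PathBuf> {
    env::var_os("LLMX_HOME")
        .or_else(|| env::var_os("CODEX_HOME"))
        .map(PathBuf::from)
}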

@@ -0,0 +1,8 @@
#[derive(Debug, Clone)]
pub(crate) enum StatusAccountDisplay {
ChatGpt {
email: Option<String>,
plan: Option<String>,
},
ApiKey,
}

@@ -0,0 +1,376 @@
use crate::history_cell::CompositeHistoryCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::PlainHistoryCell;
use crate::history_cell::with_border_with_inner_width;
use crate::version::CODEX_CLI_VERSION;
use chrono::DateTime;
use chrono::Local;
use codex_common::create_config_summary_entries;
use codex_core::config::Config;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::TokenUsage;
use codex_protocol::ConversationId;
use ratatui::prelude::*;
use ratatui::style::Stylize;
use std::collections::BTreeSet;
use std::path::PathBuf;
use super::account::StatusAccountDisplay;
use super::format::FieldFormatter;
use super::format::line_display_width;
use super::format::push_label;
use super::format::truncate_line_to_width;
use super::helpers::compose_account_display;
use super::helpers::compose_agents_summary;
use super::helpers::compose_model_display;
use super::helpers::format_directory_display;
use super::helpers::format_tokens_compact;
use super::rate_limits::RateLimitSnapshotDisplay;
use super::rate_limits::StatusRateLimitData;
use super::rate_limits::StatusRateLimitRow;
use super::rate_limits::compose_rate_limit_data;
use super::rate_limits::format_status_limit_summary;
use super::rate_limits::render_status_limit_progress_bar;
use crate::wrapping::RtOptions;
use crate::wrapping::word_wrap_lines;
#[derive(Debug, Clone)]
struct StatusContextWindowData {
percent_remaining: i64,
tokens_in_context: i64,
window: i64,
}
#[derive(Debug, Clone)]
pub(crate) struct StatusTokenUsageData {
total: i64,
input: i64,
output: i64,
context_window: Option<StatusContextWindowData>,
}
#[derive(Debug)]
struct StatusHistoryCell {
model_name: String,
model_details: Vec<String>,
directory: PathBuf,
approval: String,
sandbox: String,
agents_summary: String,
account: Option<StatusAccountDisplay>,
session_id: Option<String>,
token_usage: StatusTokenUsageData,
rate_limits: StatusRateLimitData,
}
pub(crate) fn new_status_output(
config: &Config,
total_usage: &TokenUsage,
context_usage: Option<&TokenUsage>,
session_id: &Option<ConversationId>,
rate_limits: Option<&RateLimitSnapshotDisplay>,
now: DateTime<Local>,
) -> CompositeHistoryCell {
let command = PlainHistoryCell::new(vec!["/status".magenta().into()]);
let card = StatusHistoryCell::new(
config,
total_usage,
context_usage,
session_id,
rate_limits,
now,
);
CompositeHistoryCell::new(vec![Box::new(command), Box::new(card)])
}
impl StatusHistoryCell {
fn new(
config: &Config,
total_usage: &TokenUsage,
context_usage: Option<&TokenUsage>,
session_id: &Option<ConversationId>,
rate_limits: Option<&RateLimitSnapshotDisplay>,
now: DateTime<Local>,
) -> Self {
let config_entries = create_config_summary_entries(config);
let (model_name, model_details) = compose_model_display(config, &config_entries);
let approval = config_entries
.iter()
.find(|(k, _)| *k == "approval")
.map(|(_, v)| v.clone())
.unwrap_or_else(|| "<unknown>".to_string());
let sandbox = match &config.sandbox_policy {
SandboxPolicy::DangerFullAccess => "danger-full-access".to_string(),
SandboxPolicy::ReadOnly => "read-only".to_string(),
SandboxPolicy::WorkspaceWrite { .. } => "workspace-write".to_string(),
};
let agents_summary = compose_agents_summary(config);
let account = compose_account_display(config);
let session_id = session_id.as_ref().map(std::string::ToString::to_string);
let context_window = config.model_context_window.and_then(|window| {
context_usage.map(|usage| StatusContextWindowData {
percent_remaining: usage.percent_of_context_window_remaining(window),
tokens_in_context: usage.tokens_in_context_window(),
window,
})
});
let token_usage = StatusTokenUsageData {
total: total_usage.blended_total(),
input: total_usage.non_cached_input(),
output: total_usage.output_tokens,
context_window,
};
let rate_limits = compose_rate_limit_data(rate_limits, now);
Self {
model_name,
model_details,
directory: config.cwd.clone(),
approval,
sandbox,
agents_summary,
account,
session_id,
token_usage,
rate_limits,
}
}
fn token_usage_spans(&self) -> Vec<Span<'static>> {
let total_fmt = format_tokens_compact(self.token_usage.total);
let input_fmt = format_tokens_compact(self.token_usage.input);
let output_fmt = format_tokens_compact(self.token_usage.output);
vec![
Span::from(total_fmt),
Span::from(" total "),
Span::from(" (").dim(),
Span::from(input_fmt).dim(),
Span::from(" input").dim(),
Span::from(" + ").dim(),
Span::from(output_fmt).dim(),
Span::from(" output").dim(),
Span::from(")").dim(),
]
}
fn context_window_spans(&self) -> Option<Vec<Span<'static>>> {
let context = self.token_usage.context_window.as_ref()?;
let percent = context.percent_remaining;
let used_fmt = format_tokens_compact(context.tokens_in_context);
let window_fmt = format_tokens_compact(context.window);
Some(vec![
Span::from(format!("{percent}% left")),
Span::from(" (").dim(),
Span::from(used_fmt).dim(),
Span::from(" used / ").dim(),
Span::from(window_fmt).dim(),
Span::from(")").dim(),
])
}
fn rate_limit_lines(
&self,
available_inner_width: usize,
formatter: &FieldFormatter,
) -> Vec<Line<'static>> {
match &self.rate_limits {
StatusRateLimitData::Available(rows_data) => {
if rows_data.is_empty() {
return vec![
formatter.line("Limits", vec![Span::from("data not available yet").dim()]),
];
}
self.rate_limit_row_lines(rows_data, available_inner_width, formatter)
}
StatusRateLimitData::Stale(rows_data) => {
let mut lines =
self.rate_limit_row_lines(rows_data, available_inner_width, formatter);
lines.push(formatter.line(
"Warning",
vec![Span::from("limits may be stale - start new turn to refresh.").dim()],
));
lines
}
StatusRateLimitData::Missing => {
vec![formatter.line("Limits", vec![Span::from("data not available yet").dim()])]
}
}
}
fn rate_limit_row_lines(
&self,
rows: &[StatusRateLimitRow],
available_inner_width: usize,
formatter: &FieldFormatter,
) -> Vec<Line<'static>> {
let mut lines = Vec::with_capacity(rows.len().saturating_mul(2));
for row in rows {
let percent_remaining = (100.0 - row.percent_used).clamp(0.0, 100.0);
let value_spans = vec![
Span::from(render_status_limit_progress_bar(percent_remaining)),
Span::from(" "),
Span::from(format_status_limit_summary(percent_remaining)),
];
let base_spans = formatter.full_spans(row.label.as_str(), value_spans);
let base_line = Line::from(base_spans.clone());
if let Some(resets_at) = row.resets_at.as_ref() {
let resets_span = Span::from(format!("(resets {resets_at})")).dim();
let mut inline_spans = base_spans.clone();
inline_spans.push(Span::from(" ").dim());
inline_spans.push(resets_span.clone());
if line_display_width(&Line::from(inline_spans.clone())) <= available_inner_width {
lines.push(Line::from(inline_spans));
} else {
lines.push(base_line);
lines.push(formatter.continuation(vec![resets_span]));
}
} else {
lines.push(base_line);
}
}
lines
}
fn collect_rate_limit_labels(&self, seen: &mut BTreeSet<String>, labels: &mut Vec<String>) {
match &self.rate_limits {
StatusRateLimitData::Available(rows) => {
if rows.is_empty() {
push_label(labels, seen, "Limits");
} else {
for row in rows {
push_label(labels, seen, row.label.as_str());
}
}
}
StatusRateLimitData::Stale(rows) => {
for row in rows {
push_label(labels, seen, row.label.as_str());
}
push_label(labels, seen, "Warning");
}
StatusRateLimitData::Missing => push_label(labels, seen, "Limits"),
}
}
}
impl HistoryCell for StatusHistoryCell {
fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
let mut lines: Vec<Line<'static>> = Vec::new();
lines.push(Line::from(vec![
Span::from(format!("{}>_ ", FieldFormatter::INDENT)).dim(),
Span::from("OpenAI Codex").bold(),
Span::from(" ").dim(),
Span::from(format!("(v{CODEX_CLI_VERSION})")).dim(),
]));
lines.push(Line::from(Vec::<Span<'static>>::new()));
let available_inner_width = usize::from(width.saturating_sub(4));
if available_inner_width == 0 {
return Vec::new();
}
let account_value = self.account.as_ref().map(|account| match account {
StatusAccountDisplay::ChatGpt { email, plan } => match (email, plan) {
(Some(email), Some(plan)) => format!("{email} ({plan})"),
(Some(email), None) => email.clone(),
(None, Some(plan)) => plan.clone(),
(None, None) => "ChatGPT".to_string(),
},
StatusAccountDisplay::ApiKey => {
"API key configured (run codex login to use ChatGPT)".to_string()
}
});
let mut labels: Vec<String> =
vec!["Model", "Directory", "Approval", "Sandbox", "Agents.md"]
.into_iter()
.map(str::to_string)
.collect();
let mut seen: BTreeSet<String> = labels.iter().cloned().collect();
if account_value.is_some() {
push_label(&mut labels, &mut seen, "Account");
}
if self.session_id.is_some() {
push_label(&mut labels, &mut seen, "Session");
}
push_label(&mut labels, &mut seen, "Token usage");
if self.token_usage.context_window.is_some() {
push_label(&mut labels, &mut seen, "Context window");
}
self.collect_rate_limit_labels(&mut seen, &mut labels);
let formatter = FieldFormatter::from_labels(labels.iter().map(String::as_str));
let value_width = formatter.value_width(available_inner_width);
let note_first_line = Line::from(vec![
Span::from("Visit ").cyan(),
"https://chatgpt.com/codex/settings/usage"
.cyan()
.underlined(),
Span::from(" for up-to-date").cyan(),
]);
let note_second_line = Line::from(vec![
Span::from("information on rate limits and credits").cyan(),
]);
let note_lines = word_wrap_lines(
[note_first_line, note_second_line],
RtOptions::new(available_inner_width),
);
lines.extend(note_lines);
lines.push(Line::from(Vec::<Span<'static>>::new()));
let mut model_spans = vec![Span::from(self.model_name.clone())];
if !self.model_details.is_empty() {
model_spans.push(Span::from(" (").dim());
model_spans.push(Span::from(self.model_details.join(", ")).dim());
model_spans.push(Span::from(")").dim());
}
let directory_value = format_directory_display(&self.directory, Some(value_width));
lines.push(formatter.line("Model", model_spans));
lines.push(formatter.line("Directory", vec![Span::from(directory_value)]));
lines.push(formatter.line("Approval", vec![Span::from(self.approval.clone())]));
lines.push(formatter.line("Sandbox", vec![Span::from(self.sandbox.clone())]));
lines.push(formatter.line("Agents.md", vec![Span::from(self.agents_summary.clone())]));
if let Some(account_value) = account_value {
lines.push(formatter.line("Account", vec![Span::from(account_value)]));
}
if let Some(session) = self.session_id.as_ref() {
lines.push(formatter.line("Session", vec![Span::from(session.clone())]));
}
lines.push(Line::from(Vec::<Span<'static>>::new()));
// Hide token usage only for ChatGPT subscribers
if !matches!(self.account, Some(StatusAccountDisplay::ChatGpt { .. })) {
lines.push(formatter.line("Token usage", self.token_usage_spans()));
}
if let Some(spans) = self.context_window_spans() {
lines.push(formatter.line("Context window", spans));
}
lines.extend(self.rate_limit_lines(available_inner_width, &formatter));
let content_width = lines.iter().map(line_display_width).max().unwrap_or(0);
let inner_width = content_width.min(available_inner_width);
let truncated_lines: Vec<Line<'static>> = lines
.into_iter()
.map(|line| truncate_line_to_width(line, inner_width))
.collect();
with_border_with_inner_width(truncated_lines, inner_width)
}
}

@@ -0,0 +1,147 @@
use ratatui::prelude::*;
use ratatui::style::Stylize;
use std::collections::BTreeSet;
use unicode_width::UnicodeWidthChar;
use unicode_width::UnicodeWidthStr;
#[derive(Debug, Clone)]
pub(crate) struct FieldFormatter {
indent: &'static str,
label_width: usize,
value_offset: usize,
value_indent: String,
}
impl FieldFormatter {
pub(crate) const INDENT: &'static str = " ";
pub(crate) fn from_labels<S>(labels: impl IntoIterator<Item = S>) -> Self
where
S: AsRef<str>,
{
let label_width = labels
.into_iter()
.map(|label| UnicodeWidthStr::width(label.as_ref()))
.max()
.unwrap_or(0);
let indent_width = UnicodeWidthStr::width(Self::INDENT);
let value_offset = indent_width + label_width + 1 + 3;
Self {
indent: Self::INDENT,
label_width,
value_offset,
value_indent: " ".repeat(value_offset),
}
}
pub(crate) fn line(
&self,
label: &'static str,
value_spans: Vec<Span<'static>>,
) -> Line<'static> {
Line::from(self.full_spans(label, value_spans))
}
pub(crate) fn continuation(&self, mut spans: Vec<Span<'static>>) -> Line<'static> {
let mut all_spans = Vec::with_capacity(spans.len() + 1);
all_spans.push(Span::from(self.value_indent.clone()).dim());
all_spans.append(&mut spans);
Line::from(all_spans)
}
pub(crate) fn value_width(&self, available_inner_width: usize) -> usize {
available_inner_width.saturating_sub(self.value_offset)
}
pub(crate) fn full_spans(
&self,
label: &str,
mut value_spans: Vec<Span<'static>>,
) -> Vec<Span<'static>> {
let mut spans = Vec::with_capacity(value_spans.len() + 1);
spans.push(self.label_span(label));
spans.append(&mut value_spans);
spans
}
fn label_span(&self, label: &str) -> Span<'static> {
let mut buf = String::with_capacity(self.value_offset);
buf.push_str(self.indent);
buf.push_str(label);
buf.push(':');
let label_width = UnicodeWidthStr::width(label);
let padding = 3 + self.label_width.saturating_sub(label_width);
for _ in 0..padding {
buf.push(' ');
}
Span::from(buf).dim()
}
}
pub(crate) fn push_label(labels: &mut Vec<String>, seen: &mut BTreeSet<String>, label: &str) {
if seen.contains(label) {
return;
}
let owned = label.to_string();
seen.insert(owned.clone());
labels.push(owned);
}
pub(crate) fn line_display_width(line: &Line<'static>) -> usize {
line.iter()
.map(|span| UnicodeWidthStr::width(span.content.as_ref()))
.sum()
}
pub(crate) fn truncate_line_to_width(line: Line<'static>, max_width: usize) -> Line<'static> {
if max_width == 0 {
return Line::from(Vec::<Span<'static>>::new());
}
let mut used = 0usize;
let mut spans_out: Vec<Span<'static>> = Vec::new();
for span in line.spans {
let text = span.content.into_owned();
let style = span.style;
let span_width = UnicodeWidthStr::width(text.as_str());
if span_width == 0 {
spans_out.push(Span::styled(text, style));
continue;
}
if used >= max_width {
break;
}
if used + span_width <= max_width {
used += span_width;
spans_out.push(Span::styled(text, style));
continue;
}
let mut truncated = String::new();
for ch in text.chars() {
let ch_width = UnicodeWidthChar::width(ch).unwrap_or(0);
if used + ch_width > max_width {
break;
}
truncated.push(ch);
used += ch_width;
}
if !truncated.is_empty() {
spans_out.push(Span::styled(truncated, style));
}
break;
}
Line::from(spans_out)
}

@@ -0,0 +1,189 @@
use crate::exec_command::relativize_to_home;
use crate::text_formatting;
use chrono::DateTime;
use chrono::Local;
use codex_core::auth::load_auth_dot_json;
use codex_core::config::Config;
use codex_core::project_doc::discover_project_doc_paths;
use std::path::Path;
use unicode_width::UnicodeWidthStr;
use super::account::StatusAccountDisplay;
fn normalize_agents_display_path(path: &Path) -> String {
dunce::simplified(path).display().to_string()
}
pub(crate) fn compose_model_display(
config: &Config,
entries: &[(&str, String)],
) -> (String, Vec<String>) {
let mut details: Vec<String> = Vec::new();
if let Some((_, effort)) = entries.iter().find(|(k, _)| *k == "reasoning effort") {
details.push(format!("reasoning {}", effort.to_ascii_lowercase()));
}
if let Some((_, summary)) = entries.iter().find(|(k, _)| *k == "reasoning summaries") {
let summary = summary.trim();
if summary.eq_ignore_ascii_case("none") || summary.eq_ignore_ascii_case("off") {
details.push("summaries off".to_string());
} else if !summary.is_empty() {
details.push(format!("summaries {}", summary.to_ascii_lowercase()));
}
}
(config.model.clone(), details)
}
pub(crate) fn compose_agents_summary(config: &Config) -> String {
match discover_project_doc_paths(config) {
Ok(paths) => {
let mut rels: Vec<String> = Vec::new();
for p in paths {
let file_name = p
.file_name()
.map(|name| name.to_string_lossy().to_string())
.unwrap_or_else(|| "<unknown>".to_string());
let display = if let Some(parent) = p.parent() {
if parent == config.cwd {
file_name.clone()
} else {
let mut cur = config.cwd.as_path();
let mut ups = 0usize;
let mut reached = false;
while let Some(c) = cur.parent() {
if cur == parent {
reached = true;
break;
}
cur = c;
ups += 1;
}
if reached {
let up = format!("..{}", std::path::MAIN_SEPARATOR);
format!("{}{}", up.repeat(ups), file_name)
} else if let Ok(stripped) = p.strip_prefix(&config.cwd) {
normalize_agents_display_path(stripped)
} else {
normalize_agents_display_path(&p)
}
}
} else {
normalize_agents_display_path(&p)
};
rels.push(display);
}
if rels.is_empty() {
"<none>".to_string()
} else {
rels.join(", ")
}
}
Err(_) => "<none>".to_string(),
}
}
pub(crate) fn compose_account_display(config: &Config) -> Option<StatusAccountDisplay> {
let auth =
load_auth_dot_json(&config.codex_home, config.cli_auth_credentials_store_mode).ok()??;
if let Some(tokens) = auth.tokens.as_ref() {
let info = &tokens.id_token;
let email = info.email.clone();
let plan = info.get_chatgpt_plan_type().as_deref().map(title_case);
return Some(StatusAccountDisplay::ChatGpt { email, plan });
}
if let Some(key) = auth.openai_api_key
&& !key.is_empty()
{
return Some(StatusAccountDisplay::ApiKey);
}
None
}
pub(crate) fn format_tokens_compact(value: i64) -> String {
let value = value.max(0);
if value == 0 {
return "0".to_string();
}
if value < 1_000 {
return value.to_string();
}
let value_f64 = value as f64;
let (scaled, suffix) = if value >= 1_000_000_000_000 {
(value_f64 / 1_000_000_000_000.0, "T")
} else if value >= 1_000_000_000 {
(value_f64 / 1_000_000_000.0, "B")
} else if value >= 1_000_000 {
(value_f64 / 1_000_000.0, "M")
} else {
(value_f64 / 1_000.0, "K")
};
let decimals = if scaled < 10.0 {
2
} else if scaled < 100.0 {
1
} else {
0
};
let mut formatted = format!("{scaled:.decimals$}");
if formatted.contains('.') {
while formatted.ends_with('0') {
formatted.pop();
}
if formatted.ends_with('.') {
formatted.pop();
}
}
format!("{formatted}{suffix}")
}
pub(crate) fn format_directory_display(directory: &Path, max_width: Option<usize>) -> String {
let formatted = if let Some(rel) = relativize_to_home(directory) {
if rel.as_os_str().is_empty() {
"~".to_string()
} else {
format!("~{}{}", std::path::MAIN_SEPARATOR, rel.display())
}
} else {
directory.display().to_string()
};
if let Some(max_width) = max_width {
if max_width == 0 {
return String::new();
}
if UnicodeWidthStr::width(formatted.as_str()) > max_width {
return text_formatting::center_truncate_path(&formatted, max_width);
}
}
formatted
}
pub(crate) fn format_reset_timestamp(dt: DateTime<Local>, captured_at: DateTime<Local>) -> String {
let time = dt.format("%H:%M").to_string();
if dt.date_naive() == captured_at.date_naive() {
time
} else {
format!("{time} on {}", dt.format("%-d %b"))
}
}
pub(crate) fn title_case(s: &str) -> String {
if s.is_empty() {
return String::new();
}
let mut chars = s.chars();
let first = match chars.next() {
Some(c) => c,
None => return String::new(),
};
let rest: String = chars.as_str().to_ascii_lowercase();
first.to_uppercase().collect::<String>() + &rest
}
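
For reference, format_tokens_compact above keeps at most three significant digits and trims trailing zeros. A minimal sketch of the expected output, not part of the diff itself; the values mirror the snapshot tests further down:

#[cfg(test)]
mod format_tokens_compact_sketch {
    use super::format_tokens_compact;

    #[test]
    fn compacts_token_counts() {
        // Values chosen to match the /status snapshot output below.
        assert_eq!(format_tokens_compact(750), "750");
        assert_eq!(format_tokens_compact(1_200), "1.2K");
        assert_eq!(format_tokens_compact(2_250), "2.25K");
        assert_eq!(format_tokens_compact(13_679), "13.7K");
        assert_eq!(format_tokens_compact(272_000), "272K");
    }
}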

@@ -0,0 +1,12 @@
mod account;
mod card;
mod format;
mod helpers;
mod rate_limits;
pub(crate) use card::new_status_output;
pub(crate) use rate_limits::RateLimitSnapshotDisplay;
pub(crate) use rate_limits::rate_limit_snapshot_display;
#[cfg(test)]
mod tests;

@@ -0,0 +1,153 @@
use crate::chatwidget::get_limits_duration;
use super::helpers::format_reset_timestamp;
use chrono::DateTime;
use chrono::Duration as ChronoDuration;
use chrono::Local;
use chrono::Utc;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
const STATUS_LIMIT_BAR_SEGMENTS: usize = 20;
const STATUS_LIMIT_BAR_FILLED: &str = "█";
const STATUS_LIMIT_BAR_EMPTY: &str = "░";
#[derive(Debug, Clone)]
pub(crate) struct StatusRateLimitRow {
pub label: String,
pub percent_used: f64,
pub resets_at: Option<String>,
}
#[derive(Debug, Clone)]
pub(crate) enum StatusRateLimitData {
Available(Vec<StatusRateLimitRow>),
Stale(Vec<StatusRateLimitRow>),
Missing,
}
pub(crate) const RATE_LIMIT_STALE_THRESHOLD_MINUTES: i64 = 15;
#[derive(Debug, Clone)]
pub(crate) struct RateLimitWindowDisplay {
pub used_percent: f64,
pub resets_at: Option<String>,
pub window_minutes: Option<i64>,
}
impl RateLimitWindowDisplay {
fn from_window(window: &RateLimitWindow, captured_at: DateTime<Local>) -> Self {
let resets_at = window
.resets_at
.and_then(|seconds| DateTime::<Utc>::from_timestamp(seconds, 0))
.map(|dt| dt.with_timezone(&Local))
.map(|dt| format_reset_timestamp(dt, captured_at));
Self {
used_percent: window.used_percent,
resets_at,
window_minutes: window.window_minutes,
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct RateLimitSnapshotDisplay {
pub captured_at: DateTime<Local>,
pub primary: Option<RateLimitWindowDisplay>,
pub secondary: Option<RateLimitWindowDisplay>,
}
pub(crate) fn rate_limit_snapshot_display(
snapshot: &RateLimitSnapshot,
captured_at: DateTime<Local>,
) -> RateLimitSnapshotDisplay {
RateLimitSnapshotDisplay {
captured_at,
primary: snapshot
.primary
.as_ref()
.map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
secondary: snapshot
.secondary
.as_ref()
.map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
}
}
pub(crate) fn compose_rate_limit_data(
snapshot: Option<&RateLimitSnapshotDisplay>,
now: DateTime<Local>,
) -> StatusRateLimitData {
match snapshot {
Some(snapshot) => {
let mut rows = Vec::with_capacity(2);
if let Some(primary) = snapshot.primary.as_ref() {
let label: String = primary
.window_minutes
.map(get_limits_duration)
.unwrap_or_else(|| "5h".to_string());
let label = capitalize_first(&label);
rows.push(StatusRateLimitRow {
label: format!("{label} limit"),
percent_used: primary.used_percent,
resets_at: primary.resets_at.clone(),
});
}
if let Some(secondary) = snapshot.secondary.as_ref() {
let label: String = secondary
.window_minutes
.map(get_limits_duration)
.unwrap_or_else(|| "weekly".to_string());
let label = capitalize_first(&label);
rows.push(StatusRateLimitRow {
label: format!("{label} limit"),
percent_used: secondary.used_percent,
resets_at: secondary.resets_at.clone(),
});
}
let is_stale = now.signed_duration_since(snapshot.captured_at)
> ChronoDuration::minutes(RATE_LIMIT_STALE_THRESHOLD_MINUTES);
if rows.is_empty() {
StatusRateLimitData::Available(vec![])
} else if is_stale {
StatusRateLimitData::Stale(rows)
} else {
StatusRateLimitData::Available(rows)
}
}
None => StatusRateLimitData::Missing,
}
}
pub(crate) fn render_status_limit_progress_bar(percent_remaining: f64) -> String {
let ratio = (percent_remaining / 100.0).clamp(0.0, 1.0);
let filled = (ratio * STATUS_LIMIT_BAR_SEGMENTS as f64).round() as usize;
let filled = filled.min(STATUS_LIMIT_BAR_SEGMENTS);
let empty = STATUS_LIMIT_BAR_SEGMENTS.saturating_sub(filled);
format!(
"[{}{}]",
STATUS_LIMIT_BAR_FILLED.repeat(filled),
STATUS_LIMIT_BAR_EMPTY.repeat(empty)
)
}
pub(crate) fn format_status_limit_summary(percent_remaining: f64) -> String {
format!("{percent_remaining:.0}% left")
}
fn capitalize_first(label: &str) -> String {
let mut chars = label.chars();
match chars.next() {
Some(first) => {
let mut capitalized = first.to_uppercase().collect::<String>();
capitalized.push_str(chars.as_str());
capitalized
}
None => String::new(),
}
}
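
The bar above fills round(percent_remaining / 100 × 20) of its 20 segments. A small illustrative sketch, not part of the diff, consistent with the snapshots below:

#[cfg(test)]
mod progress_bar_sketch {
    use super::format_status_limit_summary;
    use super::render_status_limit_progress_bar;

    #[test]
    fn fills_rounded_twentieths() {
        // 88% remaining rounds to 18 of 20 filled segments.
        assert_eq!(
            render_status_limit_progress_bar(88.0),
            "[██████████████████░░]"
        );
        assert_eq!(format_status_limit_summary(88.0), "88% left");
        // 28% remaining rounds to 6 of 20 filled segments.
        assert_eq!(
            render_status_limit_progress_bar(28.0),
            "[██████░░░░░░░░░░░░░░]"
        );
    }
}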

@@ -0,0 +1,22 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭────────────────────────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning none, summaries auto) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: read-only │
│ Agents.md: <none> │
│ │
│ Token usage: 1.2K total (800 input + 400 output) │
│ Context window: 100% left (1.2K used / 272K) │
│ Monthly limit: [██████████████████░░] 88% left (resets 07:08 on 7 May) │
╰────────────────────────────────────────────────────────────────────────────╯

@@ -0,0 +1,23 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭─────────────────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning high, summaries detailed) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: workspace-write │
│ Agents.md: <none> │
│ │
│ Token usage: 1.9K total (1K input + 900 output) │
│ Context window: 100% left (2.25K used / 272K) │
│ 5h limit: [██████░░░░░░░░░░░░░░] 28% left (resets 03:14) │
│ Weekly limit: [███████████░░░░░░░░░] 55% left (resets 03:24) │
╰─────────────────────────────────────────────────────────────────────╯

@@ -0,0 +1,22 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭─────────────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning none, summaries auto) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: read-only │
│ Agents.md: <none> │
│ │
│ Token usage: 750 total (500 input + 250 output) │
│ Context window: 100% left (750 used / 272K) │
│ Limits: data not available yet │
╰─────────────────────────────────────────────────────────────────╯

@@ -0,0 +1,22 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭─────────────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning none, summaries auto) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: read-only │
│ Agents.md: <none> │
│ │
│ Token usage: 750 total (500 input + 250 output) │
│ Context window: 100% left (750 used / 272K) │
│ Limits: data not available yet │
╰─────────────────────────────────────────────────────────────────╯

@@ -0,0 +1,24 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭─────────────────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning none, summaries auto) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: read-only │
│ Agents.md: <none> │
│ │
│ Token usage: 1.9K total (1K input + 900 output) │
│ Context window: 100% left (2.25K used / 272K) │
│ 5h limit: [██████░░░░░░░░░░░░░░] 28% left (resets 03:14) │
│ Weekly limit: [████████████░░░░░░░░] 60% left (resets 03:34) │
│ Warning: limits may be stale - start new turn to refresh. │
╰─────────────────────────────────────────────────────────────────────╯

@@ -0,0 +1,24 @@
---
source: tui/src/status/tests.rs
expression: sanitized
---
/status
╭────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Visit https://chatgpt.com/codex/settings/ │
│ usage for up-to-date │
│ information on rate limits and credits │
│ │
│ Model: gpt-5-codex (reasoning │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: read-only │
│ Agents.md: <none> │
│ │
│ Token usage: 1.9K total (1K input + │
│ Context window: 100% left (2.25K used / │
│ 5h limit: [██████░░░░░░░░░░░░░░] │
│ (resets 03:14) │
╰────────────────────────────────────────────╯

@@ -0,0 +1,427 @@
use super::new_status_output;
use super::rate_limit_snapshot_display;
use crate::history_cell::HistoryCell;
use chrono::Duration as ChronoDuration;
use chrono::TimeZone;
use chrono::Utc;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::TokenUsage;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use insta::assert_snapshot;
use ratatui::prelude::*;
use std::path::PathBuf;
use tempfile::TempDir;
fn test_config(temp_home: &TempDir) -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_home.path().to_path_buf(),
)
.expect("load config")
}
fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
lines
.iter()
.map(|line| {
line.spans
.iter()
.map(|span| span.content.as_ref())
.collect::<String>()
})
.collect()
}
fn sanitize_directory(lines: Vec<String>) -> Vec<String> {
lines
.into_iter()
.map(|line| {
if let (Some(dir_pos), Some(pipe_idx)) = (line.find("Directory: "), line.rfind('│')) {
let prefix = &line[..dir_pos + "Directory: ".len()];
let suffix = &line[pipe_idx..];
let content_width = pipe_idx.saturating_sub(dir_pos + "Directory: ".len());
let replacement = "[[workspace]]";
let mut rebuilt = prefix.to_string();
rebuilt.push_str(replacement);
if content_width > replacement.len() {
rebuilt.push_str(&" ".repeat(content_width - replacement.len()));
}
rebuilt.push_str(suffix);
rebuilt
} else {
line
}
})
.collect()
}
fn reset_at_from(captured_at: &chrono::DateTime<chrono::Local>, seconds: i64) -> i64 {
(*captured_at + ChronoDuration::seconds(seconds))
.with_timezone(&Utc)
.timestamp()
}
#[test]
fn status_snapshot_includes_reasoning_details() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
config.model_reasoning_summary = ReasoningSummary::Detailed;
config.sandbox_policy = SandboxPolicy::WorkspaceWrite {
writable_roots: Vec::new(),
network_access: false,
exclude_tmpdir_env_var: false,
exclude_slash_tmp: false,
};
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 1_200,
cached_input_tokens: 200,
output_tokens: 900,
reasoning_output_tokens: 150,
total_tokens: 2_250,
};
let captured_at = chrono::Local
.with_ymd_and_hms(2024, 1, 2, 3, 4, 5)
.single()
.expect("timestamp");
let snapshot = RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 72.5,
window_minutes: Some(300),
resets_at: Some(reset_at_from(&captured_at, 600)),
}),
secondary: Some(RateLimitWindow {
used_percent: 45.0,
window_minutes: Some(10080),
resets_at: Some(reset_at_from(&captured_at, 1_200)),
}),
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let composite = new_status_output(
&config,
&usage,
Some(&usage),
&None,
Some(&rate_display),
captured_at,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_includes_monthly_limit() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.model_provider_id = "openai".to_string();
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 800,
cached_input_tokens: 0,
output_tokens: 400,
reasoning_output_tokens: 0,
total_tokens: 1_200,
};
let captured_at = chrono::Local
.with_ymd_and_hms(2024, 5, 6, 7, 8, 9)
.single()
.expect("timestamp");
let snapshot = RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 12.0,
window_minutes: Some(43_200),
resets_at: Some(reset_at_from(&captured_at, 86_400)),
}),
secondary: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let composite = new_status_output(
&config,
&usage,
Some(&usage),
&None,
Some(&rate_display),
captured_at,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_card_token_usage_excludes_cached_tokens() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 1_200,
cached_input_tokens: 200,
output_tokens: 900,
reasoning_output_tokens: 0,
total_tokens: 2_100,
};
let now = chrono::Local
.with_ymd_and_hms(2024, 1, 1, 0, 0, 0)
.single()
.expect("timestamp");
let composite = new_status_output(&config, &usage, Some(&usage), &None, None, now);
let rendered = render_lines(&composite.display_lines(120));
assert!(
rendered.iter().all(|line| !line.contains("cached")),
"cached tokens should not be displayed, got: {rendered:?}"
);
}
#[test]
fn status_snapshot_truncates_in_narrow_terminal() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
config.model_reasoning_summary = ReasoningSummary::Detailed;
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 1_200,
cached_input_tokens: 200,
output_tokens: 900,
reasoning_output_tokens: 150,
total_tokens: 2_250,
};
let captured_at = chrono::Local
.with_ymd_and_hms(2024, 1, 2, 3, 4, 5)
.single()
.expect("timestamp");
let snapshot = RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 72.5,
window_minutes: Some(300),
resets_at: Some(reset_at_from(&captured_at, 600)),
}),
secondary: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let composite = new_status_output(
&config,
&usage,
Some(&usage),
&None,
Some(&rate_display),
captured_at,
);
let mut rendered_lines = render_lines(&composite.display_lines(46));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_missing_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 500,
cached_input_tokens: 0,
output_tokens: 250,
reasoning_output_tokens: 0,
total_tokens: 750,
};
let now = chrono::Local
.with_ymd_and_hms(2024, 2, 3, 4, 5, 6)
.single()
.expect("timestamp");
let composite = new_status_output(&config, &usage, Some(&usage), &None, None, now);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_empty_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 500,
cached_input_tokens: 0,
output_tokens: 250,
reasoning_output_tokens: 0,
total_tokens: 750,
};
let snapshot = RateLimitSnapshot {
primary: None,
secondary: None,
};
let captured_at = chrono::Local
.with_ymd_and_hms(2024, 6, 7, 8, 9, 10)
.single()
.expect("timestamp");
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let composite = new_status_output(
&config,
&usage,
Some(&usage),
&None,
Some(&rate_display),
captured_at,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_stale_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model = "gpt-5-codex".to_string();
config.cwd = PathBuf::from("/workspace/tests");
let usage = TokenUsage {
input_tokens: 1_200,
cached_input_tokens: 200,
output_tokens: 900,
reasoning_output_tokens: 150,
total_tokens: 2_250,
};
let captured_at = chrono::Local
.with_ymd_and_hms(2024, 1, 2, 3, 4, 5)
.single()
.expect("timestamp");
let snapshot = RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 72.5,
window_minutes: Some(300),
resets_at: Some(reset_at_from(&captured_at, 600)),
}),
secondary: Some(RateLimitWindow {
used_percent: 40.0,
window_minutes: Some(10_080),
resets_at: Some(reset_at_from(&captured_at, 1_800)),
}),
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let now = captured_at + ChronoDuration::minutes(20);
let composite = new_status_output(
&config,
&usage,
Some(&usage),
&None,
Some(&rate_display),
now,
);
let mut rendered_lines = render_lines(&composite.display_lines(80));
if cfg!(windows) {
for line in &mut rendered_lines {
*line = line.replace('\\', "/");
}
}
let sanitized = sanitize_directory(rendered_lines).join("\n");
assert_snapshot!(sanitized);
}
#[test]
fn status_context_window_uses_last_usage() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
config.model_context_window = Some(272_000);
let total_usage = TokenUsage {
input_tokens: 12_800,
cached_input_tokens: 0,
output_tokens: 879,
reasoning_output_tokens: 0,
total_tokens: 102_000,
};
let last_usage = TokenUsage {
input_tokens: 12_800,
cached_input_tokens: 0,
output_tokens: 879,
reasoning_output_tokens: 0,
total_tokens: 13_679,
};
let now = chrono::Local
.with_ymd_and_hms(2024, 6, 1, 12, 0, 0)
.single()
.expect("timestamp");
let composite = new_status_output(&config, &total_usage, Some(&last_usage), &None, None, now);
let rendered_lines = render_lines(&composite.display_lines(80));
let context_line = rendered_lines
.into_iter()
.find(|line| line.contains("Context window"))
.expect("context line");
assert!(
context_line.contains("13.7K used / 272K"),
"expected context line to reflect last usage tokens, got: {context_line}"
);
assert!(
!context_line.contains("102K"),
"context line should not use total aggregated tokens, got: {context_line}"
);
}