Jeremy Rose
2025-09-02 10:29:58 -07:00
committed by GitHub
parent 3f8d6021ac
commit e442ecedab
84 changed files with 2896 additions and 2167 deletions

codex-rs/Cargo.lock generated
View File

@@ -973,11 +973,13 @@ dependencies = [
"diffy",
"image",
"insta",
"itertools 0.14.0",
"lazy_static",
"libc",
"mcp-types",
"once_cell",
"path-clean",
"pathdiff",
"pretty_assertions",
"rand 0.9.2",
"ratatui",
@@ -3377,6 +3379,12 @@ dependencies = [
"once_cell",
]
[[package]]
name = "pathdiff"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "percent-encoding"
version = "2.3.1"

View File

@@ -116,7 +116,9 @@ pub enum ApplyPatchFileChange {
Add {
content: String,
},
Delete,
Delete {
content: String,
},
Update {
unified_diff: String,
move_path: Option<PathBuf>,
@@ -210,7 +212,18 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
changes.insert(path, ApplyPatchFileChange::Add { content: contents });
}
Hunk::DeleteFile { .. } => {
changes.insert(path, ApplyPatchFileChange::Delete);
let content = match std::fs::read_to_string(&path) {
Ok(content) => content,
Err(e) => {
return MaybeApplyPatchVerified::CorrectnessError(
ApplyPatchError::IoError(IoError {
context: format!("Failed to read {}", path.display()),
source: e,
}),
);
}
};
changes.insert(path, ApplyPatchFileChange::Delete { content });
}
Hunk::UpdateFile {
move_path, chunks, ..
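Since `Delete` now carries the contents of the file as it existed before deletion, downstream code that previously matched the unit variant has to bind the new field. A minimal sketch, assuming the three variants shown above; `summarize_change` is a hypothetical helper, not part of this patch:

fn summarize_change(change: &ApplyPatchFileChange) -> String {
    match change {
        // Added files report how many lines they introduce.
        ApplyPatchFileChange::Add { content } => format!("add ({} lines)", content.lines().count()),
        // Deletions can now report the size of what was removed, because the
        // pre-deletion contents are captured when the patch is parsed.
        ApplyPatchFileChange::Delete { content } => format!("delete ({} lines)", content.lines().count()),
        // Updates still carry a unified diff (plus an optional move target).
        ApplyPatchFileChange::Update { unified_diff, .. } => format!("update ({} diff lines)", unified_diff.lines().count()),
    }
}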

View File

@@ -109,7 +109,9 @@ pub(crate) fn convert_apply_patch_to_protocol(
ApplyPatchFileChange::Add { content } => FileChange::Add {
content: content.clone(),
},
ApplyPatchFileChange::Delete => FileChange::Delete,
ApplyPatchFileChange::Delete { content } => FileChange::Delete {
content: content.clone(),
},
ApplyPatchFileChange::Update {
unified_diff,
move_path,

View File

@@ -10,7 +10,35 @@ use tokio::process::Command;
use tokio::time::Duration as TokioDuration;
use tokio::time::timeout;
use crate::util::is_inside_git_repo;
/// Return `true` if the project folder specified by the `Config` is inside a
/// Git repository.
///
/// The check walks up the directory hierarchy looking for a `.git` file or
/// directory (note `.git` can be a file that contains a `gitdir` entry). This
/// approach does **not** require the `git` binary or the `git2` crate and is
/// therefore fairly lightweight.
///
/// Note that this does **not** detect *worktrees* created with
/// `git worktree add` where the checkout lives outside the main repository
/// directory. If you need Codex to work from such a checkout simply pass the
/// `--allow-no-git-exec` CLI flag that disables the repo requirement.
pub fn get_git_repo_root(base_dir: &Path) -> Option<PathBuf> {
let mut dir = base_dir.to_path_buf();
loop {
if dir.join(".git").exists() {
return Some(dir);
}
// Pop one component (go up one directory). `pop` returns false when
// we have reached the filesystem root.
if !dir.pop() {
break;
}
}
None
}
/// Timeout for git commands to prevent freezing on large repositories
const GIT_COMMAND_TIMEOUT: TokioDuration = TokioDuration::from_secs(5);
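A minimal usage sketch for the new helper; the starting path is illustrative:

use std::path::Path;

fn main() {
    // Walks upward from the starting directory until a `.git` file or
    // directory is found, returning the containing directory as the root.
    match get_git_repo_root(Path::new(".")) {
        Some(root) => println!("repo root: {}", root.display()),
        None => println!("not inside a git repository"),
    }
}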
@@ -94,9 +122,7 @@ pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
/// Returns the closest git sha to HEAD that is on a remote as well as the diff to that sha.
pub async fn git_diff_to_remote(cwd: &Path) -> Option<GitDiffToRemote> {
if !is_inside_git_repo(cwd) {
return None;
}
get_git_repo_root(cwd)?;
let remotes = get_git_remotes(cwd).await?;
let branches = branch_ancestry(cwd).await?;
@@ -440,7 +466,7 @@ async fn diff_against_sha(cwd: &Path, sha: &GitSha) -> Option<String> {
}
/// Resolve the path that should be used for trust checks. Similar to
/// `[utils::is_inside_git_repo]`, but resolves to the root of the main
/// `[get_git_repo_root]`, but resolves to the root of the main
/// repository. Handles worktrees.
pub fn resolve_root_git_project_for_trust(cwd: &Path) -> Option<PathBuf> {
let base = if cwd.is_dir() { cwd } else { cwd.parent()? };

View File

@@ -20,22 +20,6 @@ pub enum ParsedCommand {
query: Option<String>,
path: Option<String>,
},
Format {
cmd: String,
tool: Option<String>,
targets: Option<Vec<String>>,
},
Test {
cmd: String,
},
Lint {
cmd: String,
tool: Option<String>,
targets: Option<Vec<String>>,
},
Noop {
cmd: String,
},
Unknown {
cmd: String,
},
@@ -50,10 +34,6 @@ impl From<ParsedCommand> for codex_protocol::parse_command::ParsedCommand {
ParsedCommand::Read { cmd, name } => P::Read { cmd, name },
ParsedCommand::ListFiles { cmd, path } => P::ListFiles { cmd, path },
ParsedCommand::Search { cmd, query, path } => P::Search { cmd, query, path },
ParsedCommand::Format { cmd, tool, targets } => P::Format { cmd, tool, targets },
ParsedCommand::Test { cmd } => P::Test { cmd },
ParsedCommand::Lint { cmd, tool, targets } => P::Lint { cmd, tool, targets },
ParsedCommand::Noop { cmd } => P::Noop { cmd },
ParsedCommand::Unknown { cmd } => P::Unknown { cmd },
}
}
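With the `Format`, `Test`, `Lint`, and `Noop` variants removed, commands that previously classified as one of those now surface as `Unknown` (the deleted tests below show the old expectations). A sketch of what a test in the module's existing style would now assert, assuming that behavior; it is not part of this patch:

#[test]
fn cargo_test_now_parses_as_unknown() {
    assert_parsed(
        &shlex_split_safe("cargo test -p core"),
        vec![ParsedCommand::Unknown {
            cmd: "cargo test -p core".to_string(),
        }],
    );
}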
@@ -122,7 +102,7 @@ mod tests {
assert_parsed(
&vec_str(&["bash", "-lc", inner]),
vec![ParsedCommand::Unknown {
cmd: "git status | wc -l".to_string(),
cmd: "git status".to_string(),
}],
);
}
@@ -244,6 +224,17 @@ mod tests {
);
}
#[test]
fn cd_then_cat_is_single_read() {
assert_parsed(
&shlex_split_safe("cd foo && cat foo.txt"),
vec![ParsedCommand::Read {
cmd: "cat foo.txt".to_string(),
name: "foo.txt".to_string(),
}],
);
}
#[test]
fn supports_ls_with_pipe() {
let inner = "ls -la | sed -n '1,120p'";
@@ -315,27 +306,6 @@ mod tests {
);
}
#[test]
fn supports_npm_run_with_forwarded_args() {
assert_parsed(
&vec_str(&[
"npm",
"run",
"lint",
"--",
"--max-warnings",
"0",
"--format",
"json",
]),
vec![ParsedCommand::Lint {
cmd: "npm run lint -- --max-warnings 0 --format json".to_string(),
tool: Some("npm-script:lint".to_string()),
targets: None,
}],
);
}
#[test]
fn supports_grep_recursive_current_dir() {
assert_parsed(
@@ -396,173 +366,10 @@ mod tests {
fn supports_cd_and_rg_files() {
assert_parsed(
&shlex_split_safe("cd codex-rs && rg --files"),
vec![
ParsedCommand::Unknown {
cmd: "cd codex-rs".to_string(),
},
ParsedCommand::Search {
cmd: "rg --files".to_string(),
query: None,
path: None,
},
],
);
}
#[test]
fn echo_then_cargo_test_sequence() {
assert_parsed(
&shlex_split_safe("echo Running tests... && cargo test --all-features --quiet"),
vec![ParsedCommand::Test {
cmd: "cargo test --all-features --quiet".to_string(),
}],
);
}
#[test]
fn supports_cargo_fmt_and_test_with_config() {
assert_parsed(
&shlex_split_safe(
"cargo fmt -- --config imports_granularity=Item && cargo test -p core --all-features",
),
vec![
ParsedCommand::Format {
cmd: "cargo fmt -- --config 'imports_granularity=Item'".to_string(),
tool: Some("cargo fmt".to_string()),
targets: None,
},
ParsedCommand::Test {
cmd: "cargo test -p core --all-features".to_string(),
},
],
);
}
#[test]
fn recognizes_rustfmt_and_clippy() {
assert_parsed(
&shlex_split_safe("rustfmt src/main.rs"),
vec![ParsedCommand::Format {
cmd: "rustfmt src/main.rs".to_string(),
tool: Some("rustfmt".to_string()),
targets: Some(vec!["src/main.rs".to_string()]),
}],
);
assert_parsed(
&shlex_split_safe("cargo clippy -p core --all-features -- -D warnings"),
vec![ParsedCommand::Lint {
cmd: "cargo clippy -p core --all-features -- -D warnings".to_string(),
tool: Some("cargo clippy".to_string()),
targets: None,
}],
);
}
#[test]
fn recognizes_pytest_go_and_tools() {
assert_parsed(
&shlex_split_safe(
"pytest -k 'Login and not slow' tests/test_login.py::TestLogin::test_ok",
),
vec![ParsedCommand::Test {
cmd: "pytest -k 'Login and not slow' tests/test_login.py::TestLogin::test_ok"
.to_string(),
}],
);
assert_parsed(
&shlex_split_safe("go fmt ./..."),
vec![ParsedCommand::Format {
cmd: "go fmt ./...".to_string(),
tool: Some("go fmt".to_string()),
targets: Some(vec!["./...".to_string()]),
}],
);
assert_parsed(
&shlex_split_safe("go test ./pkg -run TestThing"),
vec![ParsedCommand::Test {
cmd: "go test ./pkg -run TestThing".to_string(),
}],
);
assert_parsed(
&shlex_split_safe("eslint . --max-warnings 0"),
vec![ParsedCommand::Lint {
cmd: "eslint . --max-warnings 0".to_string(),
tool: Some("eslint".to_string()),
targets: Some(vec![".".to_string()]),
}],
);
assert_parsed(
&shlex_split_safe("prettier -w ."),
vec![ParsedCommand::Format {
cmd: "prettier -w .".to_string(),
tool: Some("prettier".to_string()),
targets: Some(vec![".".to_string()]),
}],
);
}
#[test]
fn recognizes_jest_and_vitest_filters() {
assert_parsed(
&shlex_split_safe("jest -t 'should work' src/foo.test.ts"),
vec![ParsedCommand::Test {
cmd: "jest -t 'should work' src/foo.test.ts".to_string(),
}],
);
assert_parsed(
&shlex_split_safe("vitest -t 'runs' src/foo.test.tsx"),
vec![ParsedCommand::Test {
cmd: "vitest -t runs src/foo.test.tsx".to_string(),
}],
);
}
#[test]
fn recognizes_npx_and_scripts() {
assert_parsed(
&shlex_split_safe("npx eslint src"),
vec![ParsedCommand::Lint {
cmd: "npx eslint src".to_string(),
tool: Some("eslint".to_string()),
targets: Some(vec!["src".to_string()]),
}],
);
assert_parsed(
&shlex_split_safe("npx prettier -c ."),
vec![ParsedCommand::Format {
cmd: "npx prettier -c .".to_string(),
tool: Some("prettier".to_string()),
targets: Some(vec![".".to_string()]),
}],
);
assert_parsed(
&shlex_split_safe("pnpm run lint -- --max-warnings 0"),
vec![ParsedCommand::Lint {
cmd: "pnpm run lint -- --max-warnings 0".to_string(),
tool: Some("pnpm-script:lint".to_string()),
targets: None,
}],
);
assert_parsed(
&shlex_split_safe("npm test"),
vec![ParsedCommand::Test {
cmd: "npm test".to_string(),
}],
);
assert_parsed(
&shlex_split_safe("yarn test"),
vec![ParsedCommand::Test {
cmd: "yarn test".to_string(),
vec![ParsedCommand::Search {
cmd: "rg --files".to_string(),
query: None,
path: None,
}],
);
}
@@ -770,6 +577,51 @@ mod tests {
);
}
#[test]
fn parses_mixed_sequence_with_pipes_semicolons_and_or() {
// Provided long command sequence combining sequencing, pipelines, and ORs.
let inner = "pwd; ls -la; rg --files -g '!target' | wc -l; rg -n '^\\[workspace\\]' -n Cargo.toml || true; rg -n '^\\[package\\]' -n */Cargo.toml || true; cargo --version; rustc --version; cargo clippy --workspace --all-targets --all-features -q";
let args = vec_str(&["bash", "-lc", inner]);
let expected = vec![
ParsedCommand::Unknown {
cmd: "pwd".to_string(),
},
ParsedCommand::ListFiles {
cmd: shlex_join(&shlex_split_safe("ls -la")),
path: None,
},
ParsedCommand::Search {
cmd: shlex_join(&shlex_split_safe("rg --files -g '!target'")),
query: None,
path: Some("!target".to_string()),
},
ParsedCommand::Search {
cmd: shlex_join(&shlex_split_safe("rg -n '^\\[workspace\\]' -n Cargo.toml")),
query: Some("^\\[workspace\\]".to_string()),
path: Some("Cargo.toml".to_string()),
},
ParsedCommand::Search {
cmd: shlex_join(&shlex_split_safe("rg -n '^\\[package\\]' -n */Cargo.toml")),
query: Some("^\\[package\\]".to_string()),
path: Some("Cargo.toml".to_string()),
},
ParsedCommand::Unknown {
cmd: shlex_join(&shlex_split_safe("cargo --version")),
},
ParsedCommand::Unknown {
cmd: shlex_join(&shlex_split_safe("rustc --version")),
},
ParsedCommand::Unknown {
cmd: shlex_join(&shlex_split_safe(
"cargo clippy --workspace --all-targets --all-features -q",
)),
},
];
assert_parsed(&args, expected);
}
#[test]
fn strips_true_in_sequence() {
// `true` should be dropped from parsed sequences
@@ -867,159 +719,6 @@ mod tests {
);
}
#[test]
fn pnpm_test_is_parsed_as_test() {
assert_parsed(
&shlex_split_safe("pnpm test"),
vec![ParsedCommand::Test {
cmd: "pnpm test".to_string(),
}],
);
}
#[test]
fn pnpm_exec_vitest_is_unknown() {
// From commands_combined: cd codex-cli && pnpm exec vitest run tests/... --threads=false --passWithNoTests
let inner = "cd codex-cli && pnpm exec vitest run tests/file-tag-utils.test.ts --threads=false --passWithNoTests";
assert_parsed(
&shlex_split_safe(inner),
vec![
ParsedCommand::Unknown {
cmd: "cd codex-cli".to_string(),
},
ParsedCommand::Unknown {
cmd: "pnpm exec vitest run tests/file-tag-utils.test.ts '--threads=false' --passWithNoTests".to_string(),
},
],
);
}
#[test]
fn cargo_test_with_crate() {
assert_parsed(
&shlex_split_safe("cargo test -p codex-core parse_command::"),
vec![ParsedCommand::Test {
cmd: "cargo test -p codex-core parse_command::".to_string(),
}],
);
}
#[test]
fn cargo_test_with_crate_2() {
assert_parsed(
&shlex_split_safe(
"cd core && cargo test -q parse_command::tests::bash_dash_c_pipeline_parsing parse_command::tests::fd_file_finder_variants",
),
vec![ParsedCommand::Test {
cmd: "cargo test -q parse_command::tests::bash_dash_c_pipeline_parsing parse_command::tests::fd_file_finder_variants".to_string(),
}],
);
}
#[test]
fn cargo_test_with_crate_3() {
assert_parsed(
&shlex_split_safe("cd core && cargo test -q parse_command::tests"),
vec![ParsedCommand::Test {
cmd: "cargo test -q parse_command::tests".to_string(),
}],
);
}
#[test]
fn cargo_test_with_crate_4() {
assert_parsed(
&shlex_split_safe("cd core && cargo test --all-features parse_command -- --nocapture"),
vec![ParsedCommand::Test {
cmd: "cargo test --all-features parse_command -- --nocapture".to_string(),
}],
);
}
// Additional coverage for other common tools/frameworks
#[test]
fn recognizes_black_and_ruff() {
// black formats Python code
assert_parsed(
&shlex_split_safe("black src"),
vec![ParsedCommand::Format {
cmd: "black src".to_string(),
tool: Some("black".to_string()),
targets: Some(vec!["src".to_string()]),
}],
);
// ruff check is a linter; ensure we collect targets
assert_parsed(
&shlex_split_safe("ruff check ."),
vec![ParsedCommand::Lint {
cmd: "ruff check .".to_string(),
tool: Some("ruff".to_string()),
targets: Some(vec![".".to_string()]),
}],
);
// ruff format is a formatter
assert_parsed(
&shlex_split_safe("ruff format pkg/"),
vec![ParsedCommand::Format {
cmd: "ruff format pkg/".to_string(),
tool: Some("ruff".to_string()),
targets: Some(vec!["pkg/".to_string()]),
}],
);
}
#[test]
fn recognizes_pnpm_monorepo_test_and_npm_format_script() {
// pnpm -r test in a monorepo should still parse as a test action
assert_parsed(
&shlex_split_safe("pnpm -r test"),
vec![ParsedCommand::Test {
cmd: "pnpm -r test".to_string(),
}],
);
// npm run format should be recognized as a format action
assert_parsed(
&shlex_split_safe("npm run format -- -w ."),
vec![ParsedCommand::Format {
cmd: "npm run format -- -w .".to_string(),
tool: Some("npm-script:format".to_string()),
targets: None,
}],
);
}
#[test]
fn yarn_test_is_parsed_as_test() {
assert_parsed(
&shlex_split_safe("yarn test"),
vec![ParsedCommand::Test {
cmd: "yarn test".to_string(),
}],
);
}
#[test]
fn pytest_file_only_and_go_run_regex() {
// pytest invoked with a file path should be captured as a filter
assert_parsed(
&shlex_split_safe("pytest tests/test_example.py"),
vec![ParsedCommand::Test {
cmd: "pytest tests/test_example.py".to_string(),
}],
);
// go test with -run regex should capture the filter
assert_parsed(
&shlex_split_safe("go test ./... -run '^TestFoo$'"),
vec![ParsedCommand::Test {
cmd: "go test ./... -run '^TestFoo$'".to_string(),
}],
);
}
#[test]
fn grep_with_query_and_path() {
assert_parsed(
@@ -1090,30 +789,6 @@ mod tests {
);
}
#[test]
fn eslint_with_config_path_and_target() {
assert_parsed(
&shlex_split_safe("eslint -c .eslintrc.json src"),
vec![ParsedCommand::Lint {
cmd: "eslint -c .eslintrc.json src".to_string(),
tool: Some("eslint".to_string()),
targets: Some(vec!["src".to_string()]),
}],
);
}
#[test]
fn npx_eslint_with_config_path_and_target() {
assert_parsed(
&shlex_split_safe("npx eslint -c .eslintrc src"),
vec![ParsedCommand::Lint {
cmd: "npx eslint -c .eslintrc src".to_string(),
tool: Some("eslint".to_string()),
targets: Some(vec!["src".to_string()]),
}],
);
}
#[test]
fn fd_file_finder_variants() {
assert_parsed(
@@ -1202,16 +877,13 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
return Some(commands[1..].to_vec());
}
// cd foo && [any Test command] => [any Test command]
// cd foo && [any command] => [any command] (keep non-cd when a cd is followed by something)
if let Some(idx) = commands.iter().position(|pc| match pc {
ParsedCommand::Unknown { cmd } => {
shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("cd"))
}
_ => false,
}) && commands
.iter()
.skip(idx + 1)
.any(|pc| matches!(pc, ParsedCommand::Test { .. }))
}) && commands.len() > idx + 1
{
let mut out = Vec::with_capacity(commands.len() - 1);
out.extend_from_slice(&commands[..idx]);
@@ -1220,10 +892,10 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
}
// cmd || true => cmd
if let Some(idx) = commands.iter().position(|pc| match pc {
ParsedCommand::Noop { cmd } => cmd == "true",
_ => false,
}) {
if let Some(idx) = commands
.iter()
.position(|pc| matches!(pc, ParsedCommand::Unknown { cmd } if cmd == "true"))
{
let mut out = Vec::with_capacity(commands.len() - 1);
out.extend_from_slice(&commands[..idx]);
out.extend_from_slice(&commands[idx + 1..]);
@@ -1377,75 +1049,6 @@ fn skip_flag_values<'a>(args: &'a [String], flags_with_vals: &[&str]) -> Vec<&'a
out
}
/// Common flags for ESLint that take a following value and should not be
/// considered positional targets.
const ESLINT_FLAGS_WITH_VALUES: &[&str] = &[
"-c",
"--config",
"--parser",
"--parser-options",
"--rulesdir",
"--plugin",
"--max-warnings",
"--format",
];
fn collect_non_flag_targets(args: &[String]) -> Option<Vec<String>> {
let mut targets = Vec::new();
let mut skip_next = false;
for (i, a) in args.iter().enumerate() {
if a == "--" {
break;
}
if skip_next {
skip_next = false;
continue;
}
if a == "-p"
|| a == "--package"
|| a == "--features"
|| a == "-C"
|| a == "--config"
|| a == "--config-path"
|| a == "--out-dir"
|| a == "-o"
|| a == "--run"
|| a == "--max-warnings"
|| a == "--format"
{
if i + 1 < args.len() {
skip_next = true;
}
continue;
}
if a.starts_with('-') {
continue;
}
targets.push(a.clone());
}
if targets.is_empty() {
None
} else {
Some(targets)
}
}
fn collect_non_flag_targets_with_flags(
args: &[String],
flags_with_vals: &[&str],
) -> Option<Vec<String>> {
let targets: Vec<String> = skip_flag_values(args, flags_with_vals)
.into_iter()
.filter(|a| !a.starts_with('-'))
.cloned()
.collect();
if targets.is_empty() {
None
} else {
Some(targets)
}
}
fn is_pathish(s: &str) -> bool {
s == "."
|| s == ".."
@@ -1514,47 +1117,6 @@ fn parse_find_query_and_path(tail: &[String]) -> (Option<String>, Option<String>
(query, path)
}
fn classify_npm_like(tool: &str, tail: &[String], full_cmd: &[String]) -> Option<ParsedCommand> {
let mut r = tail;
if tool == "pnpm" && r.first().map(|s| s.as_str()) == Some("-r") {
r = &r[1..];
}
let mut script_name: Option<String> = None;
if r.first().map(|s| s.as_str()) == Some("run") {
script_name = r.get(1).cloned();
} else {
let is_test_cmd = (tool == "npm" && r.first().map(|s| s.as_str()) == Some("t"))
|| ((tool == "npm" || tool == "pnpm" || tool == "yarn")
&& r.first().map(|s| s.as_str()) == Some("test"));
if is_test_cmd {
script_name = Some("test".to_string());
}
}
if let Some(name) = script_name {
let lname = name.to_lowercase();
if lname == "test" || lname == "unit" || lname == "jest" || lname == "vitest" {
return Some(ParsedCommand::Test {
cmd: shlex_join(full_cmd),
});
}
if lname == "lint" || lname == "eslint" {
return Some(ParsedCommand::Lint {
cmd: shlex_join(full_cmd),
tool: Some(format!("{tool}-script:{name}")),
targets: None,
});
}
if lname == "format" || lname == "fmt" || lname == "prettier" {
return Some(ParsedCommand::Format {
cmd: shlex_join(full_cmd),
tool: Some(format!("{tool}-script:{name}")),
targets: None,
});
}
}
None
}
fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
let [bash, flag, script] = original else {
return None;
@@ -1586,7 +1148,7 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
.map(|tokens| summarize_main_tokens(&tokens))
.collect();
if commands.len() > 1 {
commands.retain(|pc| !matches!(pc, ParsedCommand::Noop { .. }));
commands.retain(|pc| !matches!(pc, ParsedCommand::Unknown { cmd } if cmd == "true"));
}
if commands.len() == 1 {
// If we reduced to a single command, attribute the full original script
@@ -1655,27 +1217,7 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
}
}
}
ParsedCommand::Format {
tool, targets, cmd, ..
} => ParsedCommand::Format {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Test { cmd, .. } => ParsedCommand::Test { cmd: cmd.clone() },
ParsedCommand::Lint {
tool, targets, cmd, ..
} => ParsedCommand::Lint {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Unknown { .. } => ParsedCommand::Unknown {
cmd: script.clone(),
},
ParsedCommand::Noop { .. } => ParsedCommand::Noop {
cmd: script.clone(),
},
other => other,
})
.collect();
}
@@ -1728,124 +1270,6 @@ fn drop_small_formatting_commands(mut commands: Vec<Vec<String>>) -> Vec<Vec<Str
fn summarize_main_tokens(main_cmd: &[String]) -> ParsedCommand {
match main_cmd.split_first() {
Some((head, tail)) if head == "true" && tail.is_empty() => ParsedCommand::Noop {
cmd: shlex_join(main_cmd),
},
// (sed-specific logic handled below in dedicated arm returning Read)
Some((head, tail))
if head == "cargo" && tail.first().map(|s| s.as_str()) == Some("fmt") =>
{
ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("cargo fmt".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
Some((head, tail))
if head == "cargo" && tail.first().map(|s| s.as_str()) == Some("clippy") =>
{
ParsedCommand::Lint {
cmd: shlex_join(main_cmd),
tool: Some("cargo clippy".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
Some((head, tail))
if head == "cargo" && tail.first().map(|s| s.as_str()) == Some("test") =>
{
ParsedCommand::Test {
cmd: shlex_join(main_cmd),
}
}
Some((head, tail)) if head == "rustfmt" => ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("rustfmt".to_string()),
targets: collect_non_flag_targets(tail),
},
Some((head, tail)) if head == "go" && tail.first().map(|s| s.as_str()) == Some("fmt") => {
ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("go fmt".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
Some((head, tail)) if head == "go" && tail.first().map(|s| s.as_str()) == Some("test") => {
ParsedCommand::Test {
cmd: shlex_join(main_cmd),
}
}
Some((head, _)) if head == "pytest" => ParsedCommand::Test {
cmd: shlex_join(main_cmd),
},
Some((head, tail)) if head == "eslint" => {
// Treat configuration flags with values (e.g. `-c .eslintrc`) as non-targets.
let targets = collect_non_flag_targets_with_flags(tail, ESLINT_FLAGS_WITH_VALUES);
ParsedCommand::Lint {
cmd: shlex_join(main_cmd),
tool: Some("eslint".to_string()),
targets,
}
}
Some((head, tail)) if head == "prettier" => ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("prettier".to_string()),
targets: collect_non_flag_targets(tail),
},
Some((head, tail)) if head == "black" => ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("black".to_string()),
targets: collect_non_flag_targets(tail),
},
Some((head, tail))
if head == "ruff" && tail.first().map(|s| s.as_str()) == Some("check") =>
{
ParsedCommand::Lint {
cmd: shlex_join(main_cmd),
tool: Some("ruff".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
Some((head, tail))
if head == "ruff" && tail.first().map(|s| s.as_str()) == Some("format") =>
{
ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("ruff".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
Some((head, _)) if (head == "jest" || head == "vitest") => ParsedCommand::Test {
cmd: shlex_join(main_cmd),
},
Some((head, tail))
if head == "npx" && tail.first().map(|s| s.as_str()) == Some("eslint") =>
{
let targets = collect_non_flag_targets_with_flags(&tail[1..], ESLINT_FLAGS_WITH_VALUES);
ParsedCommand::Lint {
cmd: shlex_join(main_cmd),
tool: Some("eslint".to_string()),
targets,
}
}
Some((head, tail))
if head == "npx" && tail.first().map(|s| s.as_str()) == Some("prettier") =>
{
ParsedCommand::Format {
cmd: shlex_join(main_cmd),
tool: Some("prettier".to_string()),
targets: collect_non_flag_targets(&tail[1..]),
}
}
// NPM-like scripts including yarn
Some((tool, tail)) if (tool == "pnpm" || tool == "npm" || tool == "yarn") => {
if let Some(cmd) = classify_npm_like(tool, tail, main_cmd) {
cmd
} else {
ParsedCommand::Unknown {
cmd: shlex_join(main_cmd),
}
}
}
Some((head, tail)) if head == "ls" => {
// Avoid treating option values as paths (e.g., ls -I "*.test.js").
let candidates = skip_flag_values(

View File

@@ -222,7 +222,7 @@ fn is_write_patch_constrained_to_writable_paths(
for (path, change) in action.changes() {
match change {
ApplyPatchFileChange::Add { .. } | ApplyPatchFileChange::Delete => {
ApplyPatchFileChange::Add { .. } | ApplyPatchFileChange::Delete { .. } => {
if !is_path_writable(path) {
return false;
}

View File

@@ -578,7 +578,12 @@ index {ZERO_OID}..{right_oid}
fs::write(&file, "x\n").unwrap();
let mut acc = TurnDiffTracker::new();
let del_changes = HashMap::from([(file.clone(), FileChange::Delete)]);
let del_changes = HashMap::from([(
file.clone(),
FileChange::Delete {
content: "x\n".to_string(),
},
)]);
acc.on_patch_begin(&del_changes);
// Simulate apply: delete the file from disk.
@@ -741,7 +746,12 @@ index {left_oid}..{right_oid}
assert_eq!(first, expected_first);
// Next: introduce a brand-new path b.txt into baseline snapshots via a delete change.
let del_b = HashMap::from([(b.clone(), FileChange::Delete)]);
let del_b = HashMap::from([(
b.clone(),
FileChange::Delete {
content: "z\n".to_string(),
},
)]);
acc.on_patch_begin(&del_b);
// Simulate apply: delete b.txt.
let baseline_mode = file_mode_for_path(&b).unwrap_or(FileMode::Regular);

View File

@@ -1,4 +1,3 @@
use std::path::Path;
use std::time::Duration;
use rand::Rng;
@@ -12,33 +11,3 @@ pub(crate) fn backoff(attempt: u64) -> Duration {
let jitter = rand::rng().random_range(0.9..1.1);
Duration::from_millis((base as f64 * jitter) as u64)
}
/// Return `true` if the project folder specified by the `Config` is inside a
/// Git repository.
///
/// The check walks up the directory hierarchy looking for a `.git` file or
/// directory (note `.git` can be a file that contains a `gitdir` entry). This
/// approach does **not** require the `git` binary or the `git2` crate and is
/// therefore fairly lightweight.
///
/// Note that this does **not** detect *worktrees* created with
/// `git worktree add` where the checkout lives outside the main repository
/// directory. If you need Codex to work from such a checkout simply pass the
/// `--allow-no-git-exec` CLI flag that disables the repo requirement.
pub fn is_inside_git_repo(base_dir: &Path) -> bool {
let mut dir = base_dir.to_path_buf();
loop {
if dir.join(".git").exists() {
return true;
}
// Pop one component (go up one directory). `pop` returns false when
// we have reached the filesystem root.
if !dir.pop() {
break;
}
}
false
}

View File

@@ -404,13 +404,16 @@ impl EventProcessor for EventProcessorWithHumanOutput {
println!("{}", line.style(self.green));
}
}
FileChange::Delete => {
FileChange::Delete { content } => {
let header = format!(
"{} {}",
format_file_change(change),
path.to_string_lossy()
);
println!("{}", header.style(self.magenta));
for line in content.lines() {
println!("{}", line.style(self.red));
}
}
FileChange::Update {
unified_diff,
@@ -560,7 +563,7 @@ fn escape_command(command: &[String]) -> String {
fn format_file_change(change: &FileChange) -> &'static str {
match change {
FileChange::Add { .. } => "A",
FileChange::Delete => "D",
FileChange::Delete { .. } => "D",
FileChange::Update {
move_path: Some(_), ..
} => "R",

View File

@@ -13,13 +13,13 @@ use codex_core::ConversationManager;
use codex_core::NewConversation;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::git_info::get_git_repo_root;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::protocol::TaskCompleteEvent;
use codex_core::util::is_inside_git_repo;
use codex_login::AuthManager;
use codex_ollama::DEFAULT_OSS_MODEL;
use codex_protocol::config_types::SandboxMode;
@@ -183,7 +183,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
// is using.
event_processor.print_config_summary(&config, &prompt);
if !skip_git_repo_check && !is_inside_git_repo(&config.cwd.to_path_buf()) {
if !skip_git_repo_check && get_git_repo_root(&config.cwd.to_path_buf()).is_none() {
eprintln!("Not inside a trusted directory and --skip-git-repo-check was not specified.");
std::process::exit(1);
}

View File

@@ -17,22 +17,6 @@ pub enum ParsedCommand {
query: Option<String>,
path: Option<String>,
},
Format {
cmd: String,
tool: Option<String>,
targets: Option<Vec<String>>,
},
Test {
cmd: String,
},
Lint {
cmd: String,
tool: Option<String>,
targets: Option<Vec<String>>,
},
Noop {
cmd: String,
},
Unknown {
cmd: String,
},

View File

@@ -869,7 +869,9 @@ pub enum FileChange {
Add {
content: String,
},
Delete,
Delete {
content: String,
},
Update {
unified_diff: String,
move_path: Option<PathBuf>,

View File

@@ -49,6 +49,7 @@ image = { version = "^0.25.6", default-features = false, features = [
"jpeg",
"png",
] }
itertools = "0.14.0"
lazy_static = "1"
mcp-types = { path = "../mcp-types" }
once_cell = "1"
@@ -87,6 +88,7 @@ unicode-segmentation = "1.12.0"
unicode-width = "0.1"
url = "2"
uuid = "1"
pathdiff = "0.2"
[target.'cfg(unix)'.dependencies]
libc = "0.2"
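Two dependencies are added here: `itertools` and `pathdiff`. The latter computes one path relative to another, which is plausibly what the TUI diff rendering uses now that it is handed the session `cwd` (the call site is not in this excerpt). A minimal sketch of the crate's `diff_paths` helper, with illustrative paths:

use std::path::{Path, PathBuf};

fn main() {
    // Express the first path relative to the second, when possible.
    let rel = pathdiff::diff_paths(
        Path::new("/repo/tui/src/diff_render.rs"),
        Path::new("/repo"),
    );
    assert_eq!(rel, Some(PathBuf::from("tui/src/diff_render.rs")));
}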

View File

@@ -43,6 +43,7 @@ pub(crate) struct App {
// Pager overlay state (Transcript or Static like Diff)
pub(crate) overlay: Option<Overlay>,
pub(crate) deferred_history_lines: Vec<Line<'static>>,
has_emitted_history_lines: bool,
pub(crate) enhanced_keys_supported: bool,
@@ -91,6 +92,7 @@ impl App {
transcript_lines: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
has_emitted_history_lines: false,
commit_anim_running: Arc::new(AtomicBool::new(false)),
backtrack: BacktrackState::default(),
};
@@ -177,27 +179,23 @@ impl App {
);
tui.frame_requester().schedule_frame();
}
AppEvent::InsertHistoryLines(lines) => {
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_lines(lines.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_lines.extend(lines.clone());
if self.overlay.is_some() {
self.deferred_history_lines.extend(lines);
} else {
tui.insert_history_lines(lines);
}
}
AppEvent::InsertHistoryCell(cell) => {
let cell_transcript = cell.transcript_lines();
let mut cell_transcript = cell.transcript_lines();
if !cell.is_stream_continuation() && !self.transcript_lines.is_empty() {
cell_transcript.insert(0, Line::from(""));
}
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_lines(cell_transcript.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_lines.extend(cell_transcript.clone());
let display = cell.display_lines();
let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
if self.has_emitted_history_lines {
display.insert(0, Line::from(""));
} else {
self.has_emitted_history_lines = true;
}
if self.overlay.is_some() {
self.deferred_history_lines.extend(display);
} else {

View File

@@ -1,7 +1,6 @@
use codex_core::protocol::ConversationHistoryResponseEvent;
use codex_core::protocol::Event;
use codex_file_search::FileMatch;
use ratatui::text::Line;
use crate::history_cell::HistoryCell;
@@ -40,7 +39,6 @@ pub(crate) enum AppEvent {
/// Result of computing a `/diff` command.
DiffResult(String),
InsertHistoryLines(Vec<Line<'static>>),
InsertHistoryCell(Box<dyn HistoryCell>),
StartCommitAnimation,

View File

@@ -27,6 +27,7 @@ use super::command_popup::CommandPopup;
use super::file_search_popup::FileSearchPopup;
use super::paste_burst::CharDecision;
use super::paste_burst::PasteBurst;
use crate::bottom_pane::paste_burst::FlushResult;
use crate::slash_command::SlashCommand;
use codex_protocol::custom_prompts::CustomPrompt;
@@ -223,7 +224,7 @@ impl ChatComposer {
let placeholder = format!("[Pasted Content {char_count} chars]");
self.textarea.insert_element(&placeholder);
self.pending_pastes.push((placeholder, pasted));
} else if self.handle_paste_image_path(pasted.clone()) {
} else if char_count > 1 && self.handle_paste_image_path(pasted.clone()) {
self.textarea.insert_str(" ");
} else {
self.textarea.insert_str(&pasted);
@@ -298,12 +299,7 @@ impl ChatComposer {
}
pub(crate) fn flush_paste_burst_if_due(&mut self) -> bool {
let now = Instant::now();
if let Some(pasted) = self.paste_burst.flush_if_due(now) {
let _ = self.handle_paste(pasted);
return true;
}
false
self.handle_paste_burst_flush(Instant::now())
}
pub(crate) fn is_in_paste_burst(&self) -> bool {
@@ -396,9 +392,11 @@ impl ChatComposer {
KeyEvent {
code: KeyCode::Tab, ..
} => {
// Ensure popup filtering/selection reflects the latest composer text
// before applying completion.
let first_line = self.textarea.text().lines().next().unwrap_or("");
popup.on_composer_text_change(first_line.to_string());
if let Some(sel) = popup.selected_item() {
let first_line = self.textarea.text().lines().next().unwrap_or("");
match sel {
CommandItem::Builtin(cmd) => {
let starts_with_cmd = first_line
@@ -853,15 +851,36 @@ impl ChatComposer {
}
}
fn handle_paste_burst_flush(&mut self, now: Instant) -> bool {
match self.paste_burst.flush_if_due(now) {
FlushResult::Paste(pasted) => {
self.handle_paste(pasted);
true
}
FlushResult::Typed(ch) => {
// Mirror insert_str() behavior so popups stay in sync when a
// pending fast char flushes as normal typed input.
self.textarea.insert_str(ch.to_string().as_str());
// Keep popup sync consistent with key handling: prefer slash popup; only
// sync file popup when slash popup is NOT active.
self.sync_command_popup();
if matches!(self.active_popup, ActivePopup::Command(_)) {
self.dismissed_file_popup_token = None;
} else {
self.sync_file_search_popup();
}
true
}
FlushResult::None => false,
}
}
/// Handle generic Input events that modify the textarea content.
fn handle_input_basic(&mut self, input: KeyEvent) -> (InputResult, bool) {
// If we have a buffered non-bracketed paste burst and enough time has
// elapsed since the last char, flush it before handling a new input.
let now = Instant::now();
if let Some(pasted) = self.paste_burst.flush_if_due(now) {
// Reuse normal paste path (handles large-paste placeholders).
self.handle_paste(pasted);
}
self.handle_paste_burst_flush(now);
// If we're capturing a burst and receive Enter, accumulate it instead of inserting.
if matches!(input.code, KeyCode::Enter)
@@ -1672,6 +1691,66 @@ mod tests {
}
}
#[test]
fn slash_popup_model_first_for_mo_ui() {
use insta::assert_snapshot;
use ratatui::Terminal;
use ratatui::backend::TestBackend;
let (tx, _rx) = unbounded_channel::<AppEvent>();
let sender = AppEventSender::new(tx);
let mut composer = ChatComposer::new(
true,
sender,
false,
"Ask Codex to do anything".to_string(),
false,
);
// Type "/mo" humanlike so paste-burst doesnt interfere.
type_chars_humanlike(&mut composer, &['/', 'm', 'o']);
let mut terminal = match Terminal::new(TestBackend::new(60, 4)) {
Ok(t) => t,
Err(e) => panic!("Failed to create terminal: {e}"),
};
terminal
.draw(|f| f.render_widget_ref(composer, f.area()))
.unwrap_or_else(|e| panic!("Failed to draw composer: {e}"));
// Visual snapshot should show the slash popup with /model as the first entry.
assert_snapshot!("slash_popup_mo", terminal.backend());
}
#[test]
fn slash_popup_model_first_for_mo_logic() {
use super::super::command_popup::CommandItem;
let (tx, _rx) = unbounded_channel::<AppEvent>();
let sender = AppEventSender::new(tx);
let mut composer = ChatComposer::new(
true,
sender,
false,
"Ask Codex to do anything".to_string(),
false,
);
type_chars_humanlike(&mut composer, &['/', 'm', 'o']);
match &composer.active_popup {
ActivePopup::Command(popup) => match popup.selected_item() {
Some(CommandItem::Builtin(cmd)) => {
assert_eq!(cmd.command(), "model")
}
Some(CommandItem::UserPrompt(_)) => {
panic!("unexpected prompt selected for '/mo'")
}
None => panic!("no selected command for '/mo'"),
},
_ => panic!("slash popup not active after typing '/mo'"),
}
}
// Test helper: simulate human typing with a brief delay and flush the paste-burst buffer
fn type_chars_humanlike(composer: &mut ChatComposer, chars: &[char]) {
use crossterm::event::KeyCode;

View File

@@ -238,6 +238,20 @@ mod tests {
}
}
#[test]
fn model_is_first_suggestion_for_mo() {
let mut popup = CommandPopup::new(Vec::new());
popup.on_composer_text_change("/mo".to_string());
let matches = popup.filtered_items();
match matches.first() {
Some(CommandItem::Builtin(cmd)) => assert_eq!(cmd.command(), "model"),
Some(CommandItem::UserPrompt(_)) => {
panic!("unexpected prompt ranked before '/model' for '/mo'")
}
None => panic!("expected at least one match for '/mo'"),
}
}
#[test]
fn prompt_discovery_lists_custom_prompts() {
let prompts = vec![

View File

@@ -100,53 +100,47 @@ impl BottomPane {
}
pub fn desired_height(&self, width: u16) -> u16 {
let top_margin = if self.active_view.is_some() { 0 } else { 1 };
// Always reserve one blank row above the pane for visual spacing.
let top_margin = 1;
// Base height depends on whether a modal/overlay is active.
let mut base = if let Some(view) = self.active_view.as_ref() {
view.desired_height(width)
} else {
self.composer.desired_height(width)
let base = match self.active_view.as_ref() {
Some(view) => view.desired_height(width),
None => self.composer.desired_height(width).saturating_add(
self.status
.as_ref()
.map_or(0, |status| status.desired_height(width)),
),
};
// If a status indicator is active and no modal is covering the composer,
// include its height above the composer.
if self.active_view.is_none()
&& let Some(status) = self.status.as_ref()
{
base = base.saturating_add(status.desired_height(width));
}
// Account for bottom padding rows. Top spacing is handled in layout().
base.saturating_add(Self::BOTTOM_PAD_LINES)
.saturating_add(top_margin)
}
fn layout(&self, area: Rect) -> [Rect; 2] {
// Prefer showing the status header when space is extremely tight.
// Drop the top spacer if there is only one row available.
let mut top_margin = if self.active_view.is_some() { 0 } else { 1 };
if area.height <= 1 {
top_margin = 0;
}
let status_height = if self.active_view.is_none() {
if let Some(status) = self.status.as_ref() {
status.desired_height(area.width)
} else {
0
}
// At small heights, bottom pane takes the entire height.
let (top_margin, bottom_margin) = if area.height <= BottomPane::BOTTOM_PAD_LINES + 1 {
(0, 0)
} else {
0
(1, BottomPane::BOTTOM_PAD_LINES)
};
let [_, status, content, _] = Layout::vertical([
Constraint::Max(top_margin),
Constraint::Max(status_height),
Constraint::Min(1),
Constraint::Max(BottomPane::BOTTOM_PAD_LINES),
])
.areas(area);
[status, content]
let area = Rect {
x: area.x,
y: area.y + top_margin,
width: area.width,
height: area.height - top_margin - bottom_margin,
};
match self.active_view.as_ref() {
Some(_) => [Rect::ZERO, area],
None => {
let status_height = self
.status
.as_ref()
.map_or(0, |status| status.desired_height(area.width));
Layout::vertical([Constraint::Max(status_height), Constraint::Min(1)]).areas(area)
}
}
}
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
@@ -705,7 +699,7 @@ mod tests {
pane.set_task_running(true);
// Height=2 → composer visible; status is hidden to preserve composer. Spacer may collapse.
// Height=2 → status on one row, composer on the other.
let area2 = Rect::new(0, 0, 20, 2);
let mut buf2 = Buffer::empty(area2);
(&pane).render_ref(area2, &mut buf2);
@@ -721,8 +715,8 @@ mod tests {
"expected composer to be visible on one of the rows: row0={row0:?}, row1={row1:?}"
);
assert!(
!row0.contains("Working") && !row1.contains("Working"),
"status header should be hidden when height=2"
row0.contains("Working") || row1.contains("Working"),
"expected status header to be visible at height=2: row0={row0:?}, row1={row1:?}"
);
// Height=1 → no padding; single row is the composer (status hidden).

View File

@@ -35,6 +35,12 @@ pub(crate) struct RetroGrab {
pub grabbed: String,
}
pub(crate) enum FlushResult {
Paste(String),
Typed(char),
None,
}
impl PasteBurst {
/// Recommended delay to wait between simulated keypresses (or before
/// scheduling a UI tick) so that a pending fast keystroke is flushed
@@ -95,24 +101,24 @@ impl PasteBurst {
/// now emit that char as normal typed input.
///
/// Returns None if the timeout has not elapsed or there is nothing to flush.
pub fn flush_if_due(&mut self, now: Instant) -> Option<String> {
pub fn flush_if_due(&mut self, now: Instant) -> FlushResult {
let timed_out = self
.last_plain_char_time
.is_some_and(|t| now.duration_since(t) > PASTE_BURST_CHAR_INTERVAL);
if timed_out && self.is_active_internal() {
self.active = false;
let out = std::mem::take(&mut self.buffer);
Some(out)
FlushResult::Paste(out)
} else if timed_out {
// If we were saving a single fast char and no burst followed,
// flush it as normal typed input.
if let Some((ch, _at)) = self.pending_first_char.take() {
Some(ch.to_string())
FlushResult::Typed(ch)
} else {
None
FlushResult::None
}
} else {
None
FlushResult::None
}
}
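The three-way `FlushResult` lets callers distinguish a completed burst from a single fast keystroke that never became a paste; the caller in `chat_composer.rs` above matches it exactly this way. A minimal sketch of the expected handling (`handle_paste` and `insert_str` stand in for the composer methods shown earlier in this diff):

match paste_burst.flush_if_due(Instant::now()) {
    // A full burst timed out: hand it to the normal paste path.
    FlushResult::Paste(pasted) => handle_paste(pasted),
    // A lone fast character timed out: treat it as ordinary typed input.
    FlushResult::Typed(ch) => insert_str(&ch.to_string()),
    // Nothing pending, or the inter-key timeout has not elapsed yet.
    FlushResult::None => {}
}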

View File

@@ -0,0 +1,8 @@
---
source: tui/src/bottom_pane/chat_composer.rs
expression: terminal.backend()
---
"▌/mo "
"▌ "
"▌/model choose what model and reasoning effort to use "
"▌/mention mention a file "

View File

@@ -245,7 +245,6 @@ impl TextArea {
} => self.delete_backward_word(),
KeyEvent {
code: KeyCode::Backspace,
modifiers: KeyModifiers::NONE,
..
}
| KeyEvent {

View File

@@ -101,7 +101,6 @@ pub(crate) struct ChatWidget {
// Stream lifecycle controller
stream: StreamController,
running_commands: HashMap<String, RunningCommand>,
pending_exec_completions: Vec<(Vec<String>, Vec<ParsedCommand>, CommandOutput)>,
task_complete_pending: bool,
// Queue of interruptive UI events deferred during an active write cycle
interrupts: InterruptManager,
@@ -113,7 +112,6 @@ pub(crate) struct ChatWidget {
frame_requester: FrameRequester,
// Whether to include the initial welcome banner on session configured
show_welcome_banner: bool,
last_history_was_exec: bool,
// User messages queued while a turn is in progress
queued_user_messages: VecDeque<UserMessage>,
}
@@ -333,6 +331,7 @@ impl ChatWidget {
auto_approved: event.auto_approved,
},
event.changes,
&self.config.cwd,
));
}
@@ -442,14 +441,14 @@ impl ChatWidget {
self.task_complete_pending = false;
}
// A completed stream indicates non-exec content was just inserted.
// Reset the exec header grouping so the next exec shows its header.
self.last_history_was_exec = false;
self.flush_interrupt_queue();
}
}
#[inline]
fn handle_streaming_delta(&mut self, delta: String) {
// Before streaming agent content, flush any active exec cell group.
self.flush_active_exec_cell();
let sink = AppEventHistorySink(self.app_event_tx.clone());
self.stream.begin(&sink);
self.stream.push_and_maybe_commit(&delta, &sink);
@@ -462,31 +461,29 @@ impl ChatWidget {
Some(rc) => (rc.command, rc.parsed_cmd),
None => (vec![ev.call_id.clone()], Vec::new()),
};
self.pending_exec_completions.push((
command,
parsed,
CommandOutput {
exit_code: ev.exit_code,
stdout: ev.stdout.clone(),
stderr: ev.stderr.clone(),
formatted_output: ev.formatted_output.clone(),
},
));
if self.running_commands.is_empty() {
self.active_exec_cell = None;
let pending = std::mem::take(&mut self.pending_exec_completions);
for (command, parsed, output) in pending {
let include_header = !self.last_history_was_exec;
let cell = history_cell::new_completed_exec_command(
command,
parsed,
output,
include_header,
ev.duration,
);
self.add_to_history(cell);
self.last_history_was_exec = true;
if self.active_exec_cell.is_none() {
// This should have been created by handle_exec_begin_now, but in case it wasn't,
// create it now.
self.active_exec_cell = Some(history_cell::new_active_exec_command(
ev.call_id.clone(),
command,
parsed,
));
}
if let Some(cell) = self.active_exec_cell.as_mut() {
cell.complete_call(
&ev.call_id,
CommandOutput {
exit_code: ev.exit_code,
stdout: ev.stdout.clone(),
stderr: ev.stderr.clone(),
formatted_output: ev.formatted_output.clone(),
},
ev.duration,
);
if cell.should_flush() {
self.flush_active_exec_cell();
}
}
}
@@ -495,9 +492,9 @@ impl ChatWidget {
&mut self,
event: codex_core::protocol::PatchApplyEndEvent,
) {
if event.success {
self.add_to_history(history_cell::new_patch_apply_success(event.stdout));
} else {
// If the patch was successful, just let the "Edited" block stand.
// Otherwise, add a failure block.
if !event.success {
self.add_to_history(history_cell::new_patch_apply_failure(event.stderr));
}
}
@@ -523,6 +520,7 @@ impl ChatWidget {
self.add_to_history(history_cell::new_patch_event(
PatchEventType::ApprovalRequest,
ev.changes.clone(),
&self.config.cwd,
));
let request = ApprovalRequest::ApplyPatch {
@@ -543,19 +541,28 @@ impl ChatWidget {
parsed_cmd: ev.parsed_cmd.clone(),
},
);
// Accumulate parsed commands into a single active Exec cell so they stack
match self.active_exec_cell.as_mut() {
Some(exec) => {
exec.parsed.extend(ev.parsed_cmd);
}
_ => {
let include_header = !self.last_history_was_exec;
if let Some(exec) = &self.active_exec_cell {
if let Some(new_exec) = exec.with_added_call(
ev.call_id.clone(),
ev.command.clone(),
ev.parsed_cmd.clone(),
) {
self.active_exec_cell = Some(new_exec);
} else {
// Make a new cell.
self.flush_active_exec_cell();
self.active_exec_cell = Some(history_cell::new_active_exec_command(
ev.command,
ev.parsed_cmd,
include_header,
ev.call_id.clone(),
ev.command.clone(),
ev.parsed_cmd.clone(),
));
}
} else {
self.active_exec_cell = Some(history_cell::new_active_exec_command(
ev.call_id.clone(),
ev.command.clone(),
ev.parsed_cmd.clone(),
));
}
// Request a redraw so the working header and command list are visible immediately.
@@ -585,7 +592,7 @@ impl ChatWidget {
Constraint::Max(
self.active_exec_cell
.as_ref()
.map_or(0, |c| c.desired_height(area.width)),
.map_or(0, |c| c.desired_height(area.width) + 1),
),
Constraint::Min(self.bottom_pane.desired_height(area.width)),
])
@@ -627,13 +634,11 @@ impl ChatWidget {
last_token_usage: TokenUsage::default(),
stream: StreamController::new(config),
running_commands: HashMap::new(),
pending_exec_completions: Vec::new(),
task_complete_pending: false,
interrupts: InterruptManager::new(),
reasoning_buffer: String::new(),
full_reasoning_buffer: String::new(),
session_id: None,
last_history_was_exec: false,
queued_user_messages: VecDeque::new(),
show_welcome_banner: true,
}
@@ -673,13 +678,11 @@ impl ChatWidget {
last_token_usage: TokenUsage::default(),
stream: StreamController::new(config),
running_commands: HashMap::new(),
pending_exec_completions: Vec::new(),
task_complete_pending: false,
interrupts: InterruptManager::new(),
reasoning_buffer: String::new(),
full_reasoning_buffer: String::new(),
session_id: None,
last_history_was_exec: false,
queued_user_messages: VecDeque::new(),
show_welcome_banner: false,
}
@@ -690,7 +693,7 @@ impl ChatWidget {
+ self
.active_exec_cell
.as_ref()
.map_or(0, |c| c.desired_height(width))
.map_or(0, |c| c.desired_height(width) + 1)
}
pub(crate) fn handle_key_event(&mut self, key_event: KeyEvent) {
@@ -891,18 +894,15 @@ impl ChatWidget {
fn flush_active_exec_cell(&mut self) {
if let Some(active) = self.active_exec_cell.take() {
self.last_history_was_exec = true;
self.app_event_tx
.send(AppEvent::InsertHistoryCell(Box::new(active)));
}
}
fn add_to_history(&mut self, cell: impl HistoryCell + 'static) {
// Only break exec grouping if the cell renders visible lines.
let has_display_lines = !cell.display_lines().is_empty();
self.flush_active_exec_cell();
if has_display_lines {
self.last_history_was_exec = false;
if !cell.display_lines(u16::MAX).is_empty() {
// Only break exec grouping if the cell renders visible lines.
self.flush_active_exec_cell();
}
self.app_event_tx
.send(AppEvent::InsertHistoryCell(Box::new(cell)));
@@ -1028,7 +1028,6 @@ impl ChatWidget {
let cell = cell.into_failed();
// Insert finalized exec into history and keep grouping consistent.
self.add_to_history(cell);
self.last_history_was_exec = true;
}
}
@@ -1284,6 +1283,9 @@ impl WidgetRef for &ChatWidget {
let [active_cell_area, bottom_pane_area] = self.layout_areas(area);
(&self.bottom_pane).render(bottom_pane_area, buf);
if let Some(cell) = &self.active_exec_cell {
let mut active_cell_area = active_cell_area;
active_cell_area.y += 1;
active_cell_area.height -= 1;
cell.render_ref(active_cell_area, buf);
}
}

View File

@@ -0,0 +1,5 @@
---
source: tui/src/chatwidget/tests.rs
expression: lines_to_single_string(&approved_lines)
---
• Change Approved foo.txt (+1 -0)

View File

@@ -0,0 +1,6 @@
---
source: tui/src/chatwidget/tests.rs
expression: lines_to_single_string(&proposed_lines)
---
• Proposed Change foo.txt (+1 -0)
1 +hello

View File

@@ -3,6 +3,7 @@ source: tui/src/chatwidget/tests.rs
assertion_line: 728
expression: terminal.backend()
---
" "
"? Codex wants to run echo hello world "
" "
"Model wants to run a command "

View File

@@ -0,0 +1,11 @@
---
source: tui/src/chatwidget/tests.rs
expression: terminal.backend()
---
" "
"? Codex wants to run echo hello world "
" "
"▌Allow command? "
"▌ Yes Always No, provide feedback "
"▌ Approve and run the command "
" "

View File

@@ -1,8 +1,9 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 763
assertion_line: 794
expression: terminal.backend()
---
" "
"The model wants to apply changes "
" "
"This will grant write access to /tmp for the remainder of this session. "

View File

@@ -0,0 +1,57 @@
---
source: tui/src/chatwidget/tests.rs
expression: visible_after
---
> I'm going to scan the workspace and Cargo manifests to see build profiles and
dependencies that impact binary size. Then I'll summarize the main causes.
• Explored
└ List ls -la
  Read Cargo.toml
• Ran
└ for d in ansi-escape apply-patch arg0 cli common core exec execpolicy
file-search linux-sandbox login mcp-client mcp-server mcp-types ollama
tui; do echo "--- $d/Cargo.toml"; sed -n '1,200p' $d/Cargo.toml; echo;
done
• Explored
└ Read Cargo.toml
> Here's what's driving size in this workspace's binaries.
Main Causes
- Static linking style: Each bin (codex, codex-tui, codex-exec,
codex-mcp-server, etc.) statically links its full dependency graph, so common
code isn't shared at runtime across executables.
- Heavy deps (HTTP/TLS): reqwest brings in Hyper, HTTP/2, compressors,
and a TLS stack (rustls by default; OpenSSL on musl). In core, login, tui,
and ollama you enable reqwest with json/stream, which still pulls a large
transitive set.
- Image/terminal stack: codex-tui includes image (with jpeg), ratatui,
crossterm, and ratatui-image, which together contribute significant code size
(decoders, pixel ops, terminal backends).
- Parsers/VMs: tree-sitter + tree-sitter-bash (in core and apply-patch) and
starlark (in execpolicy) include sizeable parser/VM tables and runtimes.
- Tokio runtime: Broad tokio features (rt-multi-thread, macros, process,
signal) across many crates inflate code size even if only subsets are used
per bin.
- Panic + backtraces: Default panic = unwind and backtrace support keep
unwinding tables and symbols that add weight.
- Per-target OpenSSL (musl): For *-unknown-linux-musl, core enables
openssl-sys with vendored, compiling OpenSSL into the binary; this adds
multiple megabytes per executable.
Build-Mode Notes
- Release settings: You use lto = "fat" and codegen-units = 1 (good for size),
but strip = "symbols" keeps debuginfo. Debuginfo is often the largest single
contributor; if you build in release with that setting, binaries can still
be large.
- Debug builds: cargo build (dev profile) includes full debuginfo, no LTO, and
assertions—outputs are much larger than cargo build --release.
If you want, I can outline targeted trims (e.g., strip = "debuginfo",
opt-level = "z", panic abort, tighter tokio/reqwest features) and estimate
impact per binary.

View File

@@ -1,7 +1,6 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 779
expression: terminal.backend()
---
"▌ Ask Codex to do anything "
" "
" ⏎ send Ctrl+J newline Ctrl+T transc"

View File

@@ -1,6 +1,5 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 779
expression: terminal.backend()
---
" "

View File

@@ -1,7 +1,6 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 807
expression: terminal.backend()
---
" Thinking (0s • Esc to interrupt) "
"▌ Ask Codex to do anything "
" "

View File

@@ -1,6 +1,5 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 807
expression: terminal.backend()
---
" "

View File

@@ -0,0 +1,15 @@
---
source: tui/src/chatwidget/tests.rs
expression: visual
---
> I'm going to search the repo for where “Change Approved” is rendered to update
that view.
• Explored
└ Search Change Approved
  Read diff_render.rs
Investigating rendering code (0s • Esc to interrupt)
▌Summarize recent commits
 ⏎ send   Ctrl+J newline   Ctrl+T transcript   Ctrl+C quit

View File

@@ -2,5 +2,4 @@
source: tui/src/chatwidget/tests.rs
expression: combined
---
codex
Here is the result.
> Here is the result.

View File

@@ -0,0 +1,6 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob1
---
⠋ Exploring
└ List ls -la

View File

@@ -0,0 +1,6 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob2
---
• Explored
└ List ls -la

View File

@@ -0,0 +1,7 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob3
---
⠋ Exploring
└ List ls -la
Read foo.txt

View File

@@ -0,0 +1,7 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob4
---
• Explored
└ List ls -la
Read foo.txt

View File

@@ -0,0 +1,7 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob5
---
• Explored
└ List ls -la
Read foo.txt

View File

@@ -0,0 +1,7 @@
---
source: tui/src/chatwidget/tests.rs
expression: blob6
---
• Explored
└ List ls -la
Read foo.txt, bar.txt

View File

@@ -2,5 +2,4 @@
source: tui/src/chatwidget/tests.rs
expression: combined
---
codex
Here is the result.
> Here is the result.

View File

@@ -2,5 +2,4 @@
source: tui/src/chatwidget/tests.rs
expression: exec_blob
---
>_
✗ ⌨sleep 1
• Ran sleep 1

View File

@@ -1,6 +1,5 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 878
expression: terminal.backend()
---
" "

View File

@@ -1,8 +1,9 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 851
assertion_line: 921
expression: terminal.backend()
---
" "
"? Codex wants to run echo 'hello world' "
" "
"Codex wants to run a command "

File diff suppressed because it is too large

View File

@@ -1 +0,0 @@
pub(crate) const DEFAULT_WRAP_COLS: u16 = 80;

View File

@@ -1,16 +1,17 @@
use crossterm::terminal;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line as RtLine;
use ratatui::text::Span as RtSpan;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use crate::common::DEFAULT_WRAP_COLS;
use codex_core::protocol::FileChange;
use crate::exec_command::relativize_to_home;
use crate::history_cell::PatchEventType;
use codex_core::git_info::get_git_repo_root;
use codex_core::protocol::FileChange;
const SPACES_AFTER_LINE_NUMBER: usize = 6;
@@ -22,205 +23,199 @@ enum DiffLineType {
}
pub(crate) fn create_diff_summary(
title: &str,
changes: &HashMap<PathBuf, FileChange>,
event_type: PatchEventType,
cwd: &Path,
wrap_cols: usize,
) -> Vec<RtLine<'static>> {
struct FileSummary {
display_path: String,
added: usize,
removed: usize,
}
let count_from_unified = |diff: &str| -> (usize, usize) {
if let Ok(patch) = diffy::Patch::from_str(diff) {
patch
.hunks()
.iter()
.flat_map(|h| h.lines())
.fold((0, 0), |(a, d), l| match l {
diffy::Line::Insert(_) => (a + 1, d),
diffy::Line::Delete(_) => (a, d + 1),
_ => (a, d),
})
} else {
// Fallback: manual scan to preserve counts even for unparsable diffs
let mut adds = 0usize;
let mut dels = 0usize;
for l in diff.lines() {
if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") {
continue;
}
match l.as_bytes().first() {
Some(b'+') => adds += 1,
Some(b'-') => dels += 1,
_ => {}
}
let rows = collect_rows(changes);
let header_kind = match event_type {
PatchEventType::ApplyBegin { auto_approved } => {
if auto_approved {
HeaderKind::Edited
} else {
HeaderKind::ChangeApproved
}
(adds, dels)
}
PatchEventType::ApprovalRequest => HeaderKind::ProposedChange,
};
let mut files: Vec<FileSummary> = Vec::new();
for (path, change) in changes.iter() {
match change {
FileChange::Add { content } => files.push(FileSummary {
display_path: path.display().to_string(),
added: content.lines().count(),
removed: 0,
}),
FileChange::Delete => files.push(FileSummary {
display_path: path.display().to_string(),
added: 0,
removed: std::fs::read_to_string(path)
.ok()
.map(|s| s.lines().count())
.unwrap_or(0),
}),
FileChange::Update {
unified_diff,
move_path,
} => {
let (added, removed) = count_from_unified(unified_diff);
let display_path = if let Some(new_path) = move_path {
                    format!("{} → {}", path.display(), new_path.display())
} else {
path.display().to_string()
};
files.push(FileSummary {
display_path,
added,
removed,
});
}
}
}
let file_count = files.len();
let total_added: usize = files.iter().map(|f| f.added).sum();
let total_removed: usize = files.iter().map(|f| f.removed).sum();
let noun = if file_count == 1 { "file" } else { "files" };
let mut out: Vec<RtLine<'static>> = Vec::new();
// Header
let mut header_spans: Vec<RtSpan<'static>> = Vec::new();
header_spans.push(RtSpan::styled(
title.to_owned(),
Style::default()
.fg(Color::Magenta)
.add_modifier(Modifier::BOLD),
));
header_spans.push(RtSpan::raw(" to "));
header_spans.push(RtSpan::raw(format!("{file_count} {noun} ")));
header_spans.push(RtSpan::raw("("));
header_spans.push(RtSpan::styled(
format!("+{total_added}"),
Style::default().fg(Color::Green),
));
header_spans.push(RtSpan::raw(" "));
header_spans.push(RtSpan::styled(
format!("-{total_removed}"),
Style::default().fg(Color::Red),
));
header_spans.push(RtSpan::raw(")"));
out.push(RtLine::from(header_spans));
// Dimmed per-file lines with prefix
for (idx, f) in files.iter().enumerate() {
let mut spans: Vec<RtSpan<'static>> = Vec::new();
spans.push(RtSpan::raw(f.display_path.clone()));
// Show per-file +/- counts only when there are multiple files
if file_count > 1 {
spans.push(RtSpan::raw(" ("));
spans.push(RtSpan::styled(
format!("+{}", f.added),
Style::default().fg(Color::Green),
));
spans.push(RtSpan::raw(" "));
spans.push(RtSpan::styled(
format!("-{}", f.removed),
Style::default().fg(Color::Red),
));
spans.push(RtSpan::raw(")"));
}
let mut line = RtLine::from(spans);
        let prefix = if idx == 0 { " └ " } else { "   " };
line.spans.insert(0, prefix.into());
line.spans
.iter_mut()
.for_each(|span| span.style = span.style.add_modifier(Modifier::DIM));
out.push(line);
}
let show_details = matches!(
event_type,
PatchEventType::ApplyBegin {
auto_approved: true
} | PatchEventType::ApprovalRequest
);
if show_details {
out.extend(render_patch_details(changes));
}
out
render_changes_block(rows, wrap_cols, header_kind, cwd)
}
fn render_patch_details(changes: &HashMap<PathBuf, FileChange>) -> Vec<RtLine<'static>> {
let mut out: Vec<RtLine<'static>> = Vec::new();
let term_cols: usize = terminal::size()
.map(|(w, _)| w as usize)
.unwrap_or(DEFAULT_WRAP_COLS.into());
// Shared row for per-file presentation
#[derive(Clone)]
struct Row {
#[allow(dead_code)]
path: PathBuf,
move_path: Option<PathBuf>,
added: usize,
removed: usize,
change: FileChange,
}
for (index, (path, change)) in changes.iter().enumerate() {
let is_first_file = index == 0;
// Add separator only between files (not at the very start)
if !is_first_file {
out.push(RtLine::from(vec![
RtSpan::raw(" "),
RtSpan::styled("...", style_dim()),
]));
fn collect_rows(changes: &HashMap<PathBuf, FileChange>) -> Vec<Row> {
let mut rows: Vec<Row> = Vec::new();
for (path, change) in changes.iter() {
let (added, removed) = match change {
FileChange::Add { content } => (content.lines().count(), 0),
FileChange::Delete { content } => (0, content.lines().count()),
FileChange::Update { unified_diff, .. } => calculate_add_remove_from_diff(unified_diff),
};
let move_path = match change {
FileChange::Update {
move_path: Some(new),
..
} => Some(new.clone()),
_ => None,
};
rows.push(Row {
path: path.clone(),
move_path,
added,
removed,
change: change.clone(),
});
}
rows.sort_by_key(|r| r.path.clone());
rows
}
enum HeaderKind {
ProposedChange,
Edited,
ChangeApproved,
}
fn render_changes_block(
rows: Vec<Row>,
wrap_cols: usize,
header_kind: HeaderKind,
cwd: &Path,
) -> Vec<RtLine<'static>> {
let mut out: Vec<RtLine<'static>> = Vec::new();
let term_cols = wrap_cols;
fn render_line_count_summary(added: usize, removed: usize) -> Vec<RtSpan<'static>> {
let mut spans = Vec::new();
spans.push("(".into());
spans.push(format!("+{added}").green());
spans.push(" ".into());
spans.push(format!("-{removed}").red());
spans.push(")".into());
spans
}
let render_path = |row: &Row| -> Vec<RtSpan<'static>> {
let mut spans = Vec::new();
spans.push(display_path_for(&row.path, cwd).into());
if let Some(move_path) = &row.move_path {
            spans.push(format!(" → {}", display_path_for(move_path, cwd)).into());
}
match change {
spans
};
// Header
let total_added: usize = rows.iter().map(|r| r.added).sum();
let total_removed: usize = rows.iter().map(|r| r.removed).sum();
let file_count = rows.len();
let noun = if file_count == 1 { "file" } else { "files" };
    let mut header_spans: Vec<RtSpan<'static>> = vec!["• ".into()];
match header_kind {
HeaderKind::ProposedChange => {
header_spans.push("Proposed Change".bold());
if let [row] = &rows[..] {
header_spans.push(" ".into());
header_spans.extend(render_path(row));
header_spans.push(" ".into());
header_spans.extend(render_line_count_summary(row.added, row.removed));
} else {
header_spans.push(format!(" to {file_count} {noun} ").into());
header_spans.extend(render_line_count_summary(total_added, total_removed));
}
}
HeaderKind::Edited => {
if let [row] = &rows[..] {
let verb = match &row.change {
FileChange::Add { .. } => "Added",
FileChange::Delete { .. } => "Deleted",
_ => "Edited",
};
header_spans.push(verb.bold());
header_spans.push(" ".into());
header_spans.extend(render_path(row));
header_spans.push(" ".into());
header_spans.extend(render_line_count_summary(row.added, row.removed));
} else {
header_spans.push("Edited".bold());
header_spans.push(format!(" {file_count} {noun} ").into());
header_spans.extend(render_line_count_summary(total_added, total_removed));
}
}
HeaderKind::ChangeApproved => {
header_spans.push("Change Approved".bold());
if let [row] = &rows[..] {
header_spans.push(" ".into());
header_spans.extend(render_path(row));
header_spans.push(" ".into());
header_spans.extend(render_line_count_summary(row.added, row.removed));
} else {
header_spans.push(format!(" {file_count} {noun} ").into());
header_spans.extend(render_line_count_summary(total_added, total_removed));
}
}
}
out.push(RtLine::from(header_spans));
// For Change Approved, we only show the header summary and no per-file/diff details.
if matches!(header_kind, HeaderKind::ChangeApproved) {
return out;
}
for (idx, r) in rows.into_iter().enumerate() {
// Insert a blank separator between file chunks (except before the first)
if idx > 0 {
out.push("".into());
}
// File header line (skip when single-file header already shows the name)
let skip_file_header =
matches!(header_kind, HeaderKind::ProposedChange | HeaderKind::Edited)
&& file_count == 1;
if !skip_file_header {
let mut header: Vec<RtSpan<'static>> = Vec::new();
            header.push("  └ ".dim());
header.extend(render_path(&r));
header.push(" ".into());
header.extend(render_line_count_summary(r.added, r.removed));
out.push(RtLine::from(header));
}
match r.change {
FileChange::Add { content } => {
for (i, raw) in content.lines().enumerate() {
let ln = i + 1;
out.extend(push_wrapped_diff_line(
ln,
i + 1,
DiffLineType::Insert,
raw,
term_cols,
));
}
}
FileChange::Delete => {
let original = std::fs::read_to_string(path).unwrap_or_default();
for (i, raw) in original.lines().enumerate() {
let ln = i + 1;
FileChange::Delete { content } => {
for (i, raw) in content.lines().enumerate() {
out.extend(push_wrapped_diff_line(
ln,
i + 1,
DiffLineType::Delete,
raw,
term_cols,
));
}
}
FileChange::Update {
unified_diff,
move_path: _,
} => {
if let Ok(patch) = diffy::Patch::from_str(unified_diff) {
FileChange::Update { unified_diff, .. } => {
if let Ok(patch) = diffy::Patch::from_str(&unified_diff) {
let mut is_first_hunk = true;
for h in patch.hunks() {
// Render a simple separator between non-contiguous hunks
// instead of diff-style @@ headers.
if !is_first_hunk {
out.push(RtLine::from(vec![
RtSpan::raw(" "),
                                RtSpan::styled("⋮", style_dim()),
]));
                            out.push(RtLine::from(vec![" ".into(), "⋮".dim()]));
}
is_first_hunk = false;
@@ -265,13 +260,41 @@ fn render_patch_details(changes: &HashMap<PathBuf, FileChange>) -> Vec<RtLine<'s
}
}
}
out.push(RtLine::from(RtSpan::raw("")));
}
out
}
fn display_path_for(path: &Path, cwd: &Path) -> String {
let path_in_same_repo = match (get_git_repo_root(cwd), get_git_repo_root(path)) {
(Some(cwd_repo), Some(path_repo)) => cwd_repo == path_repo,
_ => false,
};
let chosen = if path_in_same_repo {
pathdiff::diff_paths(path, cwd).unwrap_or_else(|| path.to_path_buf())
} else {
relativize_to_home(path).unwrap_or_else(|| path.to_path_buf())
};
chosen.display().to_string()
}
fn calculate_add_remove_from_diff(diff: &str) -> (usize, usize) {
if let Ok(patch) = diffy::Patch::from_str(diff) {
patch
.hunks()
.iter()
.flat_map(|h| h.lines())
.fold((0, 0), |(a, d), l| match l {
diffy::Line::Insert(_) => (a + 1, d),
diffy::Line::Delete(_) => (a, d + 1),
diffy::Line::Context(_) => (a, d),
})
} else {
// For unparsable diffs, return 0 for both counts.
(0, 0)
}
}
fn push_wrapped_diff_line(
line_number: usize,
kind: DiffLineType,
@@ -290,10 +313,10 @@ fn push_wrapped_diff_line(
let prefix_cols = indent.len() + ln_str.len() + gap_after_ln;
let mut first = true;
let (sign_opt, line_style) = match kind {
DiffLineType::Insert => (Some('+'), Some(style_add())),
DiffLineType::Delete => (Some('-'), Some(style_del())),
DiffLineType::Context => (None, None),
let (sign_char, line_style) = match kind {
DiffLineType::Insert => ('+', style_add()),
DiffLineType::Delete => ('-', style_del()),
DiffLineType::Context => (' ', style_context()),
};
let mut lines: Vec<RtLine<'static>> = Vec::new();
@@ -301,9 +324,7 @@ fn push_wrapped_diff_line(
// Fit the content for the current terminal row:
// compute how many columns are available after the prefix, then split
// at a UTF-8 character boundary so this row's chunk fits exactly.
let available_content_cols = term_cols
.saturating_sub(if first { prefix_cols + 1 } else { prefix_cols })
.max(1);
let available_content_cols = term_cols.saturating_sub(prefix_cols + 1).max(1);
let split_at_byte_index = remaining_text
.char_indices()
.nth(available_content_cols)
@@ -313,41 +334,22 @@ fn push_wrapped_diff_line(
remaining_text = rest;
if first {
let mut spans: Vec<RtSpan<'static>> = Vec::new();
spans.push(RtSpan::raw(indent));
spans.push(RtSpan::styled(ln_str.clone(), style_dim()));
spans.push(RtSpan::raw(" ".repeat(gap_after_ln)));
// Always include a sign character at the start of the displayed chunk
// ('+' for insert, '-' for delete, ' ' for context) so gutters align.
let sign_char = sign_opt.unwrap_or(' ');
let display_chunk = format!("{sign_char}{chunk}");
let content_span = match line_style {
Some(style) => RtSpan::styled(display_chunk, style),
None => RtSpan::raw(display_chunk),
};
spans.push(content_span);
let mut line = RtLine::from(spans);
if let Some(style) = line_style {
line.style = line.style.patch(style);
}
lines.push(line);
// Build gutter (indent + line number + spacing) as a dimmed span
let gutter = format!("{indent}{ln_str}{}", " ".repeat(gap_after_ln));
// Content with a sign ('+'/'-'/' ') styled per diff kind
let content = format!("{sign_char}{chunk}");
lines.push(RtLine::from(vec![
RtSpan::styled(gutter, style_gutter()),
RtSpan::styled(content, line_style),
]));
first = false;
} else {
// Continuation lines keep a space for the sign column so content aligns
let hang_prefix = format!(
"{indent}{}{} ",
" ".repeat(ln_str.len()),
" ".repeat(gap_after_ln)
);
let content_span = match line_style {
Some(style) => RtSpan::styled(chunk.to_string(), style),
None => RtSpan::raw(chunk.to_string()),
};
let mut line = RtLine::from(vec![RtSpan::raw(hang_prefix), content_span]);
if let Some(style) = line_style {
line.style = line.style.patch(style);
}
lines.push(line);
let gutter = format!("{indent}{} ", " ".repeat(ln_str.len() + gap_after_ln));
lines.push(RtLine::from(vec![
RtSpan::styled(gutter, style_gutter()),
RtSpan::styled(chunk.to_string(), line_style),
]));
}
if remaining_text.is_empty() {
break;
@@ -356,10 +358,14 @@ fn push_wrapped_diff_line(
lines
}
fn style_dim() -> Style {
fn style_gutter() -> Style {
Style::default().add_modifier(Modifier::DIM)
}
fn style_context() -> Style {
Style::default()
}
fn style_add() -> Style {
Style::default().fg(Color::Green)
}
@@ -378,6 +384,12 @@ mod tests {
use ratatui::widgets::Paragraph;
use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
fn diff_summary_for_tests(
changes: &HashMap<PathBuf, FileChange>,
event_type: PatchEventType,
) -> Vec<RtLine<'static>> {
create_diff_summary(changes, event_type, &PathBuf::from("/"), 80)
}
fn snapshot_lines(name: &str, lines: Vec<RtLine<'static>>, width: u16, height: u16) {
let mut terminal = Terminal::new(TestBackend::new(width, height)).expect("terminal");
@@ -391,6 +403,23 @@ mod tests {
assert_snapshot!(name, terminal.backend());
}
fn snapshot_lines_text(name: &str, lines: &[RtLine<'static>]) {
// Convert Lines to plain text rows and trim trailing spaces so it's
// easier to validate indentation visually in snapshots.
let text = lines
.iter()
.map(|l| {
l.spans
.iter()
.map(|s| s.content.as_ref())
.collect::<String>()
})
.map(|s| s.trim_end().to_string())
.collect::<Vec<_>>()
.join("\n");
assert_snapshot!(name, text);
}
#[test]
fn ui_snapshot_add_details() {
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
@@ -401,8 +430,7 @@ mod tests {
},
);
let lines =
create_diff_summary("proposed patch", &changes, PatchEventType::ApprovalRequest);
let lines = diff_summary_for_tests(&changes, PatchEventType::ApprovalRequest);
snapshot_lines("add_details", lines, 80, 10);
}
@@ -423,8 +451,7 @@ mod tests {
},
);
let lines =
create_diff_summary("proposed patch", &changes, PatchEventType::ApprovalRequest);
let lines = diff_summary_for_tests(&changes, PatchEventType::ApprovalRequest);
snapshot_lines("update_details_with_rename", lines, 80, 12);
}
@@ -435,11 +462,10 @@ mod tests {
let long_line = "this is a very long line that should wrap across multiple terminal columns and continue";
// Call the wrapping function directly so we can precisely control the width
let lines =
push_wrapped_diff_line(1, DiffLineType::Insert, long_line, DEFAULT_WRAP_COLS.into());
let lines = push_wrapped_diff_line(1, DiffLineType::Insert, long_line, 80);
// Render into a small terminal to capture the visual layout
snapshot_lines("wrap_behavior_insert", lines, DEFAULT_WRAP_COLS + 10, 8);
snapshot_lines("wrap_behavior_insert", lines, 90, 8);
}
#[test]
@@ -458,8 +484,7 @@ mod tests {
},
);
let lines =
create_diff_summary("proposed patch", &changes, PatchEventType::ApprovalRequest);
let lines = diff_summary_for_tests(&changes, PatchEventType::ApprovalRequest);
snapshot_lines("single_line_replacement_counts", lines, 80, 8);
}
@@ -480,8 +505,7 @@ mod tests {
},
);
let lines =
create_diff_summary("proposed patch", &changes, PatchEventType::ApprovalRequest);
let lines = diff_summary_for_tests(&changes, PatchEventType::ApprovalRequest);
snapshot_lines("blank_context_line", lines, 80, 10);
}
@@ -503,10 +527,232 @@ mod tests {
},
);
let lines =
create_diff_summary("proposed patch", &changes, PatchEventType::ApprovalRequest);
let lines = diff_summary_for_tests(&changes, PatchEventType::ApprovalRequest);
// Height is large enough to show both hunks and the separator
snapshot_lines("vertical_ellipsis_between_hunks", lines, 80, 16);
}
#[test]
fn ui_snapshot_apply_update_block() {
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
let original = "line one\nline two\nline three\n";
let modified = "line one\nline two changed\nline three\n";
let patch = diffy::create_patch(original, modified).to_string();
changes.insert(
PathBuf::from("example.txt"),
FileChange::Update {
unified_diff: patch,
move_path: None,
},
);
for (name, auto_approved) in [
("apply_update_block", true),
("apply_update_block_manual", false),
] {
let lines =
diff_summary_for_tests(&changes, PatchEventType::ApplyBegin { auto_approved });
snapshot_lines(name, lines, 80, 12);
}
}
#[test]
fn ui_snapshot_apply_update_with_rename_block() {
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
let original = "A\nB\nC\n";
let modified = "A\nB changed\nC\n";
let patch = diffy::create_patch(original, modified).to_string();
changes.insert(
PathBuf::from("old_name.rs"),
FileChange::Update {
unified_diff: patch,
move_path: Some(PathBuf::from("new_name.rs")),
},
);
let lines = diff_summary_for_tests(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
);
snapshot_lines("apply_update_with_rename_block", lines, 80, 12);
}
#[test]
fn ui_snapshot_apply_multiple_files_block() {
// Two files: one update and one add, to exercise combined header and per-file rows
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
// File a.txt: single-line replacement (one delete, one insert)
let patch_a = diffy::create_patch("one\n", "one changed\n").to_string();
changes.insert(
PathBuf::from("a.txt"),
FileChange::Update {
unified_diff: patch_a,
move_path: None,
},
);
// File b.txt: newly added with one line
changes.insert(
PathBuf::from("b.txt"),
FileChange::Add {
content: "new\n".to_string(),
},
);
let lines = diff_summary_for_tests(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
);
snapshot_lines("apply_multiple_files_block", lines, 80, 14);
}
#[test]
fn ui_snapshot_apply_add_block() {
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
changes.insert(
PathBuf::from("new_file.txt"),
FileChange::Add {
content: "alpha\nbeta\n".to_string(),
},
);
let lines = diff_summary_for_tests(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
);
snapshot_lines("apply_add_block", lines, 80, 10);
}
#[test]
fn ui_snapshot_apply_delete_block() {
// Write a temporary file so the delete renderer can read original content
let tmp_path = PathBuf::from("tmp_delete_example.txt");
std::fs::write(&tmp_path, "first\nsecond\nthird\n").expect("write tmp file");
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
changes.insert(
tmp_path.clone(),
FileChange::Delete {
content: "first\nsecond\nthird\n".to_string(),
},
);
let lines = diff_summary_for_tests(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
);
// Cleanup best-effort; rendering has already read the file
let _ = std::fs::remove_file(&tmp_path);
snapshot_lines("apply_delete_block", lines, 80, 12);
}
#[test]
fn ui_snapshot_apply_update_block_wraps_long_lines() {
// Create a patch with a long modified line to force wrapping
let original = "line 1\nshort\nline 3\n";
let modified = "line 1\nshort this_is_a_very_long_modified_line_that_should_wrap_across_multiple_terminal_columns_and_continue_even_further_beyond_eighty_columns_to_force_multiple_wraps\nline 3\n";
let patch = diffy::create_patch(original, modified).to_string();
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
changes.insert(
PathBuf::from("long_example.txt"),
FileChange::Update {
unified_diff: patch,
move_path: None,
},
);
let lines = create_diff_summary(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
&PathBuf::from("/"),
72,
);
// Render with backend width wider than wrap width to avoid Paragraph auto-wrap.
snapshot_lines("apply_update_block_wraps_long_lines", lines, 80, 12);
}
#[test]
fn ui_snapshot_apply_update_block_wraps_long_lines_text() {
// This mirrors the desired layout example: sign only on first inserted line,
// subsequent wrapped pieces start aligned under the line number gutter.
let original = "1\n2\n3\n4\n";
let modified = "1\nadded long line which wraps and_if_there_is_a_long_token_it_will_be_broken\n3\n4 context line which also wraps across\n";
let patch = diffy::create_patch(original, modified).to_string();
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
changes.insert(
PathBuf::from("wrap_demo.txt"),
FileChange::Update {
unified_diff: patch,
move_path: None,
},
);
let mut lines = create_diff_summary(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
&PathBuf::from("/"),
28,
);
// Drop the combined header for this text-only snapshot
if !lines.is_empty() {
lines.remove(0);
}
snapshot_lines_text("apply_update_block_wraps_long_lines_text", &lines);
}
#[test]
fn ui_snapshot_apply_update_block_relativizes_path() {
let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("/"));
let abs_old = cwd.join("abs_old.rs");
let abs_new = cwd.join("abs_new.rs");
let original = "X\nY\n";
let modified = "X changed\nY\n";
let patch = diffy::create_patch(original, modified).to_string();
let mut changes: HashMap<PathBuf, FileChange> = HashMap::new();
changes.insert(
abs_old.clone(),
FileChange::Update {
unified_diff: patch,
move_path: Some(abs_new.clone()),
},
);
let lines = create_diff_summary(
&changes,
PatchEventType::ApplyBegin {
auto_approved: true,
},
&cwd,
80,
);
snapshot_lines("apply_update_block_relativizes_path", lines, 80, 10);
}
}

File diff suppressed because it is too large

View File

@@ -21,7 +21,8 @@ use ratatui::text::Span;
use textwrap::Options as TwOptions;
use textwrap::WordSplitter;
/// Insert `lines` above the viewport.
/// Insert `lines` above the viewport using the terminal's backend writer
/// (avoids direct stdout references).
pub(crate) fn insert_history_lines(terminal: &mut tui::Terminal, lines: Vec<Line>) {
let mut out = std::io::stdout();
insert_history_lines_to_writer(terminal, &mut out, lines);
@@ -262,7 +263,10 @@ where
}
/// Word-aware wrapping for a list of `Line`s preserving styles.
pub(crate) fn word_wrap_lines(lines: &[Line], width: u16) -> Vec<Line<'static>> {
pub(crate) fn word_wrap_lines<'a, I>(lines: I, width: u16) -> Vec<Line<'static>>
where
I: IntoIterator<Item = &'a Line<'a>>,
{
let mut out = Vec::new();
let w = width.max(1) as usize;
for line in lines {

View File

@@ -34,7 +34,6 @@ mod chatwidget;
mod citation_regex;
mod cli;
mod clipboard_paste;
mod common;
pub mod custom_terminal;
mod diff_render;
mod exec_command;

View File

@@ -1,4 +1,4 @@
use codex_core::util::is_inside_git_repo;
use codex_core::git_info::get_git_repo_root;
use codex_login::AuthManager;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -88,7 +88,7 @@ impl OnboardingScreen {
auth_manager,
}))
}
let is_git_repo = is_inside_git_repo(&cwd);
let is_git_repo = get_git_repo_root(&cwd).is_some();
let highlighted = if is_git_repo {
TrustDirectorySelection::Trust
} else {

View File

@@ -140,16 +140,6 @@ pub(crate) fn log_inbound_app_event(event: &AppEvent) {
});
LOGGER.write_json_line(value);
}
// Internal UI events; still log for fidelity, but avoid heavy payloads.
AppEvent::InsertHistoryLines(lines) => {
let value = json!({
"ts": now_ts(),
"dir": "to_tui",
"kind": "insert_history",
"lines": lines.len(),
});
LOGGER.write_json_line(value);
}
AppEvent::InsertHistoryCell(cell) => {
let value = json!({
"ts": now_ts(),

View File

@@ -1,9 +1,9 @@
---
source: tui/src/diff_render.rs
assertion_line: 765
expression: terminal.backend()
---
"proposed patch to 1 file (+2 -0) "
" └ README.md "
"• Proposed Change README.md (+2 -0) "
" 1 +first line "
" 2 +second line "
" "
@@ -12,3 +12,4 @@ expression: terminal.backend()
" "
" "
" "
" "

View File

@@ -0,0 +1,14 @@
---
source: tui/src/diff_render.rs
expression: terminal.backend()
---
"• Added new_file.txt (+2 -0) "
" 1 +alpha "
" 2 +beta "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,16 @@
---
source: tui/src/diff_render.rs
expression: terminal.backend()
---
"• Deleted tmp_delete_example.txt (+0 -3) "
" 1 -first "
" 2 -second "
" 3 -third "
" "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,18 @@
---
source: tui/src/diff_render.rs
expression: terminal.backend()
---
"• Edited 2 files (+2 -1) "
" └ a.txt (+1 -1) "
" 1 -one "
" 1 +one changed "
" "
" └ b.txt (+1 -0) "
" 1 +new "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,17 @@
---
source: tui/src/diff_render.rs
assertion_line: 748
expression: terminal.backend()
---
"• Edited example.txt (+1 -1) "
" 1 line one "
" 2 -line two "
" 2 +line two changed "
" 3 line three "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,16 @@
---
source: tui/src/diff_render.rs
expression: terminal.backend()
---
"• Change Approved example.txt (+1 -1) "
" "
" "
" "
" "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,15 @@
---
source: tui/src/diff_render.rs
assertion_line: 748
expression: terminal.backend()
---
"• Edited abs_old.rs → abs_new.rs (+1 -1) "
" 1 -X "
" 1 +X changed "
" 2 Y "
" "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,17 @@
---
source: tui/src/diff_render.rs
assertion_line: 748
expression: terminal.backend()
---
"• Edited long_example.txt (+1 -1) "
" 1 line 1 "
" 2 -short "
" 2 +short this_is_a_very_long_modified_line_that_should_wrap_acro "
" ss_multiple_terminal_columns_and_continue_even_further_beyond "
" _eighty_columns_to_force_multiple_wraps "
" 3 line 3 "
" "
" "
" "
" "
" "

View File

@@ -0,0 +1,16 @@
---
source: tui/src/diff_render.rs
expression: text
---
1 1
2 -2
2 +added long line w
hich wraps and_if
_there_is_a_long_
token_it_will_be_
broken
3 3
4 -4
4 +4 context line wh
ich also wraps ac
ross

View File

@@ -0,0 +1,17 @@
---
source: tui/src/diff_render.rs
assertion_line: 748
expression: terminal.backend()
---
"• Edited old_name.rs → new_name.rs (+1 -1) "
" 1 A "
" 2 -B "
" 2 +B changed "
" 3 C "
" "
" "
" "
" "
" "
" "
" "

View File

@@ -1,9 +1,9 @@
---
source: tui/src/diff_render.rs
assertion_line: 765
expression: terminal.backend()
---
"proposed patch to 1 file (+1 -1) "
" └ example.txt "
"• Proposed Change example.txt (+1 -1) "
" 1 "
" 2 -Y "
" 2 +Y changed "
@@ -12,3 +12,4 @@ expression: terminal.backend()
" "
" "
" "
" "

View File

@@ -1,12 +1,13 @@
---
source: tui/src/diff_render.rs
assertion_line: 765
expression: terminal.backend()
---
"proposed patch to 1 file (+1 -1) "
" └ README.md "
"• Proposed Change README.md (+1 -1) "
" 1 -# Codex CLI (Rust Implementation) "
" 1 +# Codex CLI (Rust Implementation) banana "
" "
" "
" "
" "
" "

View File

@@ -1,9 +1,9 @@
---
source: tui/src/diff_render.rs
assertion_line: 765
expression: terminal.backend()
---
"proposed patch to 1 file (+1 -1) "
" └ src/lib.rs → src/lib_new.rs "
"• Proposed Change src/lib.rs → src/lib_new.rs (+1 -1) "
" 1 line one "
" 2 -line two "
" 2 +line two changed "
@@ -14,3 +14,4 @@ expression: terminal.backend()
" "
" "
" "
" "

View File

@@ -1,9 +1,9 @@
---
source: tui/src/diff_render.rs
assertion_line: 765
expression: terminal.backend()
---
"proposed patch to 1 file (+2 -2) "
" └ example.txt "
"• Proposed Change example.txt (+2 -2) "
" 1 line 1 "
" 2 -line 2 "
" 2 +line two changed "
@@ -18,3 +18,4 @@ expression: terminal.backend()
" 9 +line nine changed "
" 10 line 10 "
" "
" "

View File

@@ -0,0 +1,6 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Explored
└ Read auth.rs, shimmer.rs

View File

@@ -0,0 +1,7 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Explored
└ Search shimmer_spans
Read shimmer.rs, status_indicator_widget.rs

View File

@@ -0,0 +1,8 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Explored
└ Search shimmer_spans
Read shimmer.rs
Read status_indicator_widget.rs

View File

@@ -0,0 +1,9 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran
└ first_token_is_long_
enough_to_wrap
second_token_is_also
_long_enough_to_wrap

View File

@@ -0,0 +1,7 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran
└ echo one
echo two

View File

@@ -0,0 +1,9 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran
└ set -o pipefail
cargo test
--all-features
--quiet

View File

@@ -0,0 +1,20 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Updated Plan
└ I'll update Grafana call
error handling by adding
retries and clearer
messages when the backend is
unreachable.
✔ Investigate existing error
paths and logging around
HTTP timeouts
□ Harden Grafana client
error handling with retry/
backoff and user-friendly
messages
□ Add tests for transient
failure scenarios and
surfacing to the UI

View File

@@ -0,0 +1,7 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Updated Plan
└ □ Define error taxonomy
□ Implement mapping to user messages

View File

@@ -0,0 +1,14 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran
└ echo
this_is_a_very_long_
single_token_that_wi
ll_wrap_across_the_a
vailable_width
error: first line on
stderr
error: second line on
stderr

View File

@@ -0,0 +1,5 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran echo ok

View File

@@ -0,0 +1,9 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran
└ a_very_long_toke
n_without_spaces
_to_force_wrappi
ng

View File

@@ -0,0 +1,15 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
• Ran seq 1 10 1>&2 && false
└ 1
2
3
4
5
6
7
8
9
10

View File

@@ -0,0 +1,8 @@
---
source: tui/src/history_cell.rs
expression: rendered
---
▌one two
▌three four
▌five six
▌seven

View File

@@ -1,3 +1,5 @@
use crate::history_cell;
use crate::history_cell::HistoryCell;
use codex_core::config::Config;
use ratatui::text::Line;
@@ -6,7 +8,7 @@ use super::StreamState;
/// Sink for history insertions and animation control.
pub(crate) trait HistorySink {
fn insert_history(&self, lines: Vec<Line<'static>>);
fn insert_history_cell(&self, cell: Box<dyn HistoryCell>);
fn start_commit_animation(&self);
fn stop_commit_animation(&self);
}
@@ -15,9 +17,9 @@ pub(crate) trait HistorySink {
pub(crate) struct AppEventHistorySink(pub(crate) crate::app_event_sender::AppEventSender);
impl HistorySink for AppEventHistorySink {
fn insert_history(&self, lines: Vec<Line<'static>>) {
fn insert_history_cell(&self, cell: Box<dyn crate::history_cell::HistoryCell>) {
self.0
.send(crate::app_event::AppEvent::InsertHistoryLines(lines))
.send(crate::app_event::AppEvent::InsertHistoryCell(cell))
}
fn start_commit_animation(&self) {
self.0
@@ -66,10 +68,6 @@ impl StreamController {
// leave header state unchanged; caller decides when to reset
}
fn emit_header_if_needed(&mut self, out_lines: &mut Lines) -> bool {
self.header.maybe_emit(out_lines)
}
/// Begin an answer stream. Does not emit header yet; it is emitted on first commit.
pub(crate) fn begin(&mut self, _sink: &impl HistorySink) {
// Starting a new stream cancels any pending finish-from-previous-stream animation.
@@ -124,10 +122,11 @@ impl StreamController {
out_lines.extend(step.history);
}
if !out_lines.is_empty() {
let mut lines_with_header: Lines = Vec::new();
self.emit_header_if_needed(&mut lines_with_header);
lines_with_header.extend(out_lines);
sink.insert_history(lines_with_header);
// Insert as a HistoryCell so display drops the header while transcript keeps it.
sink.insert_history_cell(Box::new(history_cell::AgentMessageCell::new(
out_lines,
self.header.maybe_emit_header(),
)));
}
// Cleanup
@@ -159,11 +158,10 @@ impl StreamController {
}
let step = { self.state.step() };
if !step.history.is_empty() {
let mut lines: Lines = Vec::new();
self.emit_header_if_needed(&mut lines);
let mut out = lines;
out.extend(step.history);
sink.insert_history(out);
sink.insert_history_cell(Box::new(history_cell::AgentMessageCell::new(
step.history,
self.header.maybe_emit_header(),
)));
}
let is_idle = self.state.is_idle();
@@ -244,8 +242,9 @@ mod tests {
}
}
impl HistorySink for TestSink {
fn insert_history(&self, lines: Vec<Line<'static>>) {
self.lines.borrow_mut().push(lines);
fn insert_history_cell(&self, cell: Box<dyn crate::history_cell::HistoryCell>) {
// For tests, store the transcript representation of the cell.
self.lines.borrow_mut().push(cell.transcript_lines());
}
fn start_commit_animation(&self) {}
fn stop_commit_animation(&self) {}

View File

@@ -62,20 +62,13 @@ impl HeaderEmitter {
self.emitted_this_turn = false;
}
pub(crate) fn maybe_emit(&mut self, out_lines: &mut Vec<ratatui::text::Line<'static>>) -> bool {
pub(crate) fn maybe_emit_header(&mut self) -> bool {
if !self.emitted_in_stream && !self.emitted_this_turn {
// Add a leading blank line before the header for visual spacing
out_lines.push(ratatui::text::Line::from(""));
out_lines.push(render_header_line());
self.emitted_in_stream = true;
self.emitted_this_turn = true;
return true;
true
} else {
false
}
false
}
}
fn render_header_line() -> ratatui::text::Line<'static> {
use ratatui::style::Stylize;
ratatui::text::Line::from("codex".magenta().bold())
}

View File

@@ -510,6 +510,7 @@ impl Tui {
}
}
// Use synchronized update via backend instead of stdout()
std::io::stdout().sync_update(|_| {
#[cfg(unix)]
{
@@ -560,8 +561,7 @@ impl Tui {
}
terminal.draw(|frame| {
draw_fn(frame);
})?;
Ok(())
})
})?
}
}

View File

@@ -29,6 +29,7 @@ use ratatui::widgets::Wrap;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::exec_command::strip_bash_lc_and_escape;
use crate::history_cell;
/// Request coming from the agent that needs user approval.
pub(crate) enum ApprovalRequest {
@@ -258,12 +259,11 @@ impl UserApprovalWidget {
}
fn send_decision_with_feedback(&mut self, decision: ReviewDecision, feedback: String) {
let mut lines: Vec<Line<'static>> = vec![Line::from("")];
match &self.approval_request {
ApprovalRequest::Exec { command, .. } => {
let cmd = strip_bash_lc_and_escape(command);
let mut cmd_span: Span = cmd.clone().into();
cmd_span.style = cmd_span.style.add_modifier(Modifier::DIM);
// TODO: move this rendering into history_cell.
let mut lines: Vec<Line<'static>> = vec![];
// Result line based on decision.
match decision {
@@ -316,18 +316,22 @@ impl UserApprovalWidget {
));
}
}
if !feedback.trim().is_empty() {
lines.push(Line::from("feedback:"));
for l in feedback.lines() {
lines.push(Line::from(l.to_string()));
}
}
self.app_event_tx.send(AppEvent::InsertHistoryCell(Box::new(
history_cell::new_user_approval_decision(lines),
)));
}
ApprovalRequest::ApplyPatch { .. } => {
lines.push(Line::from(format!("patch approval decision: {decision:?}")));
// No history line for patch approval decisions.
}
}
if !feedback.trim().is_empty() {
lines.push(Line::from("feedback:"));
for l in feedback.lines() {
lines.push(Line::from(l.to_string()));
}
}
self.app_event_tx.send(AppEvent::InsertHistoryLines(lines));
let op = match &self.approval_request {
ApprovalRequest::Exec { id, .. } => Op::ExecApproval {

View File

@@ -1,208 +0,0 @@
To get started, describe a task or try one of these commands:
/init - create an AGENTS.md file with instructions for Codex
/status - show current session configuration and token usage
/approvals - choose what Codex can do without approval
/model - choose what model and reasoning effort to use
codex
I'm going to scan the workspace and Cargo manifests to see build profiles and
dependencies that impact binary size. Then I'll summarize the main causes.
_
✓ ls -la
└ total 6696
drwxr-xr-x@ 39 easong staff 1248 Aug 9 08:49 .
drwxr-xr-x@ 29 easong staff 928 Aug 9 08:50 ..
-rw-r--r--@ 1 easong staff 10244 Aug 8 21:13 .DS_Store
-rw-r--r--@ 1 easong staff 212 Aug 4 18:49 .gitignore
… +30 lines
-rw-r--r--@ 1 easong staff 9971 Aug 8 23:07 simpler.md
drwxr-xr-x@ 6 easong staff 192 Aug 4 22:40 target
drwxr-xr-x@ 6 easong staff 192 Aug 7 19:20 tui
-rw-r--r--@ 1 easong staff 8639 Aug 6 23:30 typewriter.md
-rw-r--r--@ 1 easong staff 7673 Aug 6 21:58 wrap-plan.md
✓ sed -n '1,200p' Cargo.toml
└ [workspace]
members = [
"ansi-escape",
"apply-patch",
"arg0",
… +38 lines
codegen-units = 1
[patch.crates-io]
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch =
"nornagon-v0.29.0-patch" }
✓ for d in ansi-escape apply-patch arg0 cli common core exec execpolicy
file-search linux-sandbox login mcp-client mcp-server mcp-types ollama tui; do
echo "--- $d/Cargo.toml"; sed -n '1,200p' $d/Cargo.toml; echo; done
└ --- ansi-escape/Cargo.toml
[package]
edition = "2024"
name = "codex-ansi-escape"
version = { workspace = true }
… +237 lines
"process",
"rt-multi-thread",
"signal",
] }
tracing = { version
✓ sed -n '1,200p' execpolicy/Cargo.toml
└ [package]
name = "codex-execpolicy"
version = { workspace = true }
edition = "2024"
… +23 lines
serde_json = "1.0.142"
serde_with = { version = "3", features = ["macros"] }
[dev-dependencies]
tempfile = "3.13.0"
✓ sed -n '1,200p' file-search/Cargo.toml
└ [package]
edition = "2024"
name = "codex-file-search"
version = { workspace = true }
… +11 lines
ignore = "0.4.23"
nucleo-matcher = "0.3.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.142"
tokio = { version = "1", features = ["full"] }
✓ sed -n '1,200p' linux-sandbox/Cargo.toml
└ [package]
edition = "2024"
name = "codex-linux-sandbox"
version = { workspace = true }
… +24 lines
"macros",
"process",
"rt-multi-thread",
"signal",
] }
✓ sed -n '1,200p' login/Cargo.toml
└ [package]
edition = "2024"
name = "codex-login"
version = { workspace = true }
… +17 lines
] }
[dev-dependencies]
pretty_assertions = "1.4.1"
tempfile = "3"
✓ sed -n '1,200p' mcp-client/Cargo.toml
└ [package]
name = "codex-mcp-client"
version = { workspace = true }
edition = "2024"
… +13 lines
"process",
"rt-multi-thread",
"sync",
"time",
] }
✓ sed -n '1,200p' mcp-server/Cargo.toml
└ [package]
edition = "2024"
name = "codex-mcp-server"
version = { workspace = true }
… +35 lines
mcp_test_support = { path = "tests/common" }
pretty_assertions = "1.4.1"
tempfile = "3"
tokio-test = "0.4"
wiremock = "0.6"
✓ sed -n '1,200p' mcp-types/Cargo.toml
└ [package]
edition = "2024"
name = "mcp-types"
version = { workspace = true }
… +1 lines
workspace = true
[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
✓ sed -n '1,200p' ollama/Cargo.toml
└ [package]
edition = "2024"
name = "codex-ollama"
version = { workspace = true }
… +22 lines
tracing = { version = "0.1.41", features = ["log"] }
wiremock = "0.6"
[dev-dependencies]
tempfile = "3"
✓ sed -n '1,200p' tui/Cargo.toml
└ [package]
edition = "2024"
name = "codex-tui"
version = { workspace = true }
… +74 lines
chrono = { version = "0.4", features = ["serde"] }
insta = "1.43.1"
pretty_assertions = "1"
rand = "0.8"
vt100 = "0.16.2"
✓ sed -n '1,200p' chatgpt/Cargo.toml
└ [package]
edition = "2024"
name = "codex-chatgpt"
version = { workspace = true }
… +11 lines
serde_json = "1"
tokio = { version = "1", features = ["full"] }
[dev-dependencies]
tempfile = "3"
codex
Here's what's driving size in this workspace's binaries.
Main Causes
- Static linking style: Each bin (codex, codex-tui, codex-exec,
codex-mcp-server, etc.) statically links its full dependency graph, so common
code isn't shared at runtime across executables.
- Heavy deps (HTTP/TLS): reqwest brings in Hyper, HTTP/2, compressors, and a TLS
stack (rustls by default; OpenSSL on musl). In core, login, tui, and ollama you
enable reqwest with json/stream, which still pulls a large transitive set.
- Image/terminal stack: codex-tui includes image (with jpeg), ratatui,
crossterm, and ratatui-image, which together contribute significant code size
(decoders, pixel ops, terminal backends).
- Parsers/VMs: tree-sitter + tree-sitter-bash (in core and apply-patch) and
starlark (in execpolicy) include sizeable parser/VM tables and runtimes.
- Tokio runtime: Broad tokio features (rt-multi-thread, macros, process, signal)
across many crates inflate code size even if only subsets are used per bin.
- Panic + backtraces: Default panic = unwind and backtrace support keep
unwinding tables and symbols that add weight.
- Per-target OpenSSL (musl): For *-unknown-linux-musl, core enables openssl-sys
with vendored, compiling OpenSSL into the binary—this adds multiple megabytes
per executable.
Build-Mode Notes
- Release settings: You use lto = "fat" and codegen-units = 1 (good for size),
but strip = "symbols" keeps debuginfo. Debuginfo is often the largest single
contributor; if you build in release with that setting, binaries can still be
large.
- Debug builds: cargo build (dev profile) includes full debuginfo, no LTO, and
assertions—outputs are much larger than cargo build --release.
If you want, I can outline targeted trims (e.g., strip = "debuginfo", opt-level
= "z", panic abort, tighter tokio/reqwest features) and estimate impact per
binary.
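
As a rough illustration of the trims mentioned above, a size-focused release
profile in the workspace Cargo.toml might look like the following. This is a
minimal sketch: the opt-level, strip, and panic values are the suggestions from
the summary above, not settings confirmed in this repository, and the tokio
feature list is purely hypothetical.

[profile.release]
lto = "fat"            # already set in this workspace
codegen-units = 1      # already set in this workspace
opt-level = "z"        # prefer smaller code over faster code
strip = "debuginfo"    # drop debuginfo while keeping symbol names
panic = "abort"        # omit unwinding tables and landing pads

# Feature tightening happens per dependency; an illustrative (hypothetical) example:
# tokio = { version = "1", features = ["rt-multi-thread", "macros"] }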