chore: upgrade to Rust 1.89 (#2465)

Codex created this PR from the following prompt:

> upgrade this entire repo to Rust 1.89. Note that this requires
updating codex-rs/rust-toolchain.toml as well as the workflows in
.github/. Make sure that things are "clippy clean" as this change will
likely uncover new Clippy errors. `just fmt` and `cargo clippy --tests`
are sufficient to check for correctness

Note this modifies a lot of lines because it folds nested `if`
statements using `&&`.

---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/2465).
* #2467
* __->__ #2465
This commit is contained in:
Michael Bolin
2025-08-19 13:22:02 -07:00
committed by GitHub
parent aafa00dbe0
commit 50c48e88f5
37 changed files with 504 additions and 521 deletions

View File

@@ -39,7 +39,7 @@ jobs:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v5 uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.88 - uses: dtolnay/rust-toolchain@1.89
with: with:
targets: x86_64-unknown-linux-gnu targets: x86_64-unknown-linux-gnu
components: clippy components: clippy

View File

@@ -57,7 +57,7 @@ jobs:
working-directory: codex-rs working-directory: codex-rs
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.88 - uses: dtolnay/rust-toolchain@1.89
with: with:
components: rustfmt components: rustfmt
- name: cargo fmt - name: cargo fmt
@@ -112,7 +112,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.88 - uses: dtolnay/rust-toolchain@1.89
with: with:
targets: ${{ matrix.target }} targets: ${{ matrix.target }}
components: clippy components: clippy

View File

@@ -75,7 +75,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.88 - uses: dtolnay/rust-toolchain@1.89
with: with:
targets: ${{ matrix.target }} targets: ${{ matrix.target }}

View File

@@ -415,12 +415,12 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result<AffectedPaths> {
for hunk in hunks { for hunk in hunks {
match hunk { match hunk {
Hunk::AddFile { path, contents } => { Hunk::AddFile { path, contents } => {
if let Some(parent) = path.parent() { if let Some(parent) = path.parent()
if !parent.as_os_str().is_empty() { && !parent.as_os_str().is_empty()
std::fs::create_dir_all(parent).with_context(|| { {
format!("Failed to create parent directories for {}", path.display()) std::fs::create_dir_all(parent).with_context(|| {
})?; format!("Failed to create parent directories for {}", path.display())
} })?;
} }
std::fs::write(path, contents) std::fs::write(path, contents)
.with_context(|| format!("Failed to write file {}", path.display()))?; .with_context(|| format!("Failed to write file {}", path.display()))?;
@@ -439,15 +439,12 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result<AffectedPaths> {
let AppliedPatch { new_contents, .. } = let AppliedPatch { new_contents, .. } =
derive_new_contents_from_chunks(path, chunks)?; derive_new_contents_from_chunks(path, chunks)?;
if let Some(dest) = move_path { if let Some(dest) = move_path {
if let Some(parent) = dest.parent() { if let Some(parent) = dest.parent()
if !parent.as_os_str().is_empty() { && !parent.as_os_str().is_empty()
std::fs::create_dir_all(parent).with_context(|| { {
format!( std::fs::create_dir_all(parent).with_context(|| {
"Failed to create parent directories for {}", format!("Failed to create parent directories for {}", dest.display())
dest.display() })?;
)
})?;
}
} }
std::fs::write(dest, new_contents) std::fs::write(dest, new_contents)
.with_context(|| format!("Failed to write file {}", dest.display()))?; .with_context(|| format!("Failed to write file {}", dest.display()))?;
@@ -529,9 +526,12 @@ fn compute_replacements(
// If a chunk has a `change_context`, we use seek_sequence to find it, then // If a chunk has a `change_context`, we use seek_sequence to find it, then
// adjust our `line_index` to continue from there. // adjust our `line_index` to continue from there.
if let Some(ctx_line) = &chunk.change_context { if let Some(ctx_line) = &chunk.change_context {
if let Some(idx) = if let Some(idx) = seek_sequence::seek_sequence(
seek_sequence::seek_sequence(original_lines, &[ctx_line.clone()], line_index, false) original_lines,
{ std::slice::from_ref(ctx_line),
line_index,
false,
) {
line_index = idx + 1; line_index = idx + 1;
} else { } else {
return Err(ApplyPatchError::ComputeReplacements(format!( return Err(ApplyPatchError::ComputeReplacements(format!(

View File

@@ -89,10 +89,10 @@ const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";
/// Security: Do not allow `.env` files to create or modify any variables /// Security: Do not allow `.env` files to create or modify any variables
/// with names starting with `CODEX_`. /// with names starting with `CODEX_`.
fn load_dotenv() { fn load_dotenv() {
if let Ok(codex_home) = codex_core::config::find_codex_home() { if let Ok(codex_home) = codex_core::config::find_codex_home()
if let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env")) { && let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env"))
set_filtered(iter); {
} set_filtered(iter);
} }
if let Ok(iter) = dotenvy::dotenv_iter() { if let Ok(iter) = dotenvy::dotenv_iter() {

View File

@@ -66,12 +66,12 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
Ok(api_key) => { Ok(api_key) => {
eprintln!("Logged in using an API key - {}", safe_format_key(&api_key)); eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));
if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) { if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR)
if env_api_key == api_key { && env_api_key == api_key
eprintln!( {
" API loaded from OPENAI_API_KEY environment variable or .env file" eprintln!(
); " API loaded from OPENAI_API_KEY environment variable or .env file"
} );
} }
std::process::exit(0); std::process::exit(0);
} }

View File

@@ -290,13 +290,12 @@ async fn process_chat_sse<S>(
.get("delta") .get("delta")
.and_then(|d| d.get("content")) .and_then(|d| d.get("content"))
.and_then(|c| c.as_str()) .and_then(|c| c.as_str())
&& !content.is_empty()
{ {
if !content.is_empty() { assistant_text.push_str(content);
assistant_text.push_str(content); let _ = tx_event
let _ = tx_event .send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
.send(Ok(ResponseEvent::OutputTextDelta(content.to_string()))) .await;
.await;
}
} }
// Forward any reasoning/thinking deltas if present. // Forward any reasoning/thinking deltas if present.
@@ -333,27 +332,25 @@ async fn process_chat_sse<S>(
.get("delta") .get("delta")
.and_then(|d| d.get("tool_calls")) .and_then(|d| d.get("tool_calls"))
.and_then(|tc| tc.as_array()) .and_then(|tc| tc.as_array())
&& let Some(tool_call) = tool_calls.first()
{ {
if let Some(tool_call) = tool_calls.first() { // Mark that we have an active function call in progress.
// Mark that we have an active function call in progress. fn_call_state.active = true;
fn_call_state.active = true;
// Extract call_id if present. // Extract call_id if present.
if let Some(id) = tool_call.get("id").and_then(|v| v.as_str()) { if let Some(id) = tool_call.get("id").and_then(|v| v.as_str()) {
fn_call_state.call_id.get_or_insert_with(|| id.to_string()); fn_call_state.call_id.get_or_insert_with(|| id.to_string());
}
// Extract function details if present.
if let Some(function) = tool_call.get("function") {
if let Some(name) = function.get("name").and_then(|n| n.as_str()) {
fn_call_state.name.get_or_insert_with(|| name.to_string());
} }
// Extract function details if present. if let Some(args_fragment) = function.get("arguments").and_then(|a| a.as_str())
if let Some(function) = tool_call.get("function") { {
if let Some(name) = function.get("name").and_then(|n| n.as_str()) { fn_call_state.arguments.push_str(args_fragment);
fn_call_state.name.get_or_insert_with(|| name.to_string());
}
if let Some(args_fragment) =
function.get("arguments").and_then(|a| a.as_str())
{
fn_call_state.arguments.push_str(args_fragment);
}
} }
} }
} }
@@ -491,15 +488,14 @@ where
// Only use the final assistant message if we have not // Only use the final assistant message if we have not
// seen any deltas; otherwise, deltas already built the // seen any deltas; otherwise, deltas already built the
// cumulative text and this would duplicate it. // cumulative text and this would duplicate it.
if this.cumulative.is_empty() { if this.cumulative.is_empty()
if let crate::models::ResponseItem::Message { content, .. } = &item { && let crate::models::ResponseItem::Message { content, .. } = &item
if let Some(text) = content.iter().find_map(|c| match c { && let Some(text) = content.iter().find_map(|c| match c {
crate::models::ContentItem::OutputText { text } => Some(text), crate::models::ContentItem::OutputText { text } => Some(text),
_ => None, _ => None,
}) { })
this.cumulative.push_str(text); {
} this.cumulative.push_str(text);
}
} }
// Swallow assistant message here; emit on Completed. // Swallow assistant message here; emit on Completed.

View File

@@ -544,10 +544,10 @@ impl Session {
pub fn remove_task(&self, sub_id: &str) { pub fn remove_task(&self, sub_id: &str) {
let mut state = self.state.lock_unchecked(); let mut state = self.state.lock_unchecked();
if let Some(task) = &state.current_task { if let Some(task) = &state.current_task
if task.sub_id == sub_id { && task.sub_id == sub_id
state.current_task.take(); {
} state.current_task.take();
} }
} }
@@ -1239,18 +1239,18 @@ async fn submission_loop(
// Gracefully flush and shutdown rollout recorder on session end so tests // Gracefully flush and shutdown rollout recorder on session end so tests
// that inspect the rollout file do not race with the background writer. // that inspect the rollout file do not race with the background writer.
let recorder_opt = sess.rollout.lock_unchecked().take(); let recorder_opt = sess.rollout.lock_unchecked().take();
if let Some(rec) = recorder_opt { if let Some(rec) = recorder_opt
if let Err(e) = rec.shutdown().await { && let Err(e) = rec.shutdown().await
warn!("failed to shutdown rollout recorder: {e}"); {
let event = Event { warn!("failed to shutdown rollout recorder: {e}");
id: sub.id.clone(), let event = Event {
msg: EventMsg::Error(ErrorEvent { id: sub.id.clone(),
message: "Failed to shutdown rollout recorder".to_string(), msg: EventMsg::Error(ErrorEvent {
}), message: "Failed to shutdown rollout recorder".to_string(),
}; }),
if let Err(e) = sess.tx_event.send(event).await { };
warn!("failed to send error message: {e:?}"); if let Err(e) = sess.tx_event.send(event).await {
} warn!("failed to send error message: {e:?}");
} }
} }

View File

@@ -759,10 +759,10 @@ fn default_model() -> String {
pub fn find_codex_home() -> std::io::Result<PathBuf> { pub fn find_codex_home() -> std::io::Result<PathBuf> {
// Honor the `CODEX_HOME` environment variable when it is set to allow users // Honor the `CODEX_HOME` environment variable when it is set to allow users
// (and tests) to override the default location. // (and tests) to override the default location.
if let Ok(val) = std::env::var("CODEX_HOME") { if let Ok(val) = std::env::var("CODEX_HOME")
if !val.is_empty() { && !val.is_empty()
return PathBuf::from(val).canonicalize(); {
} return PathBuf::from(val).canonicalize();
} }
let mut p = home_dir().ok_or_else(|| { let mut p = home_dir().ok_or_else(|| {

View File

@@ -51,33 +51,30 @@ pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
}; };
// Process commit hash // Process commit hash
if let Some(output) = commit_result { if let Some(output) = commit_result
if output.status.success() { && output.status.success()
if let Ok(hash) = String::from_utf8(output.stdout) { && let Ok(hash) = String::from_utf8(output.stdout)
git_info.commit_hash = Some(hash.trim().to_string()); {
} git_info.commit_hash = Some(hash.trim().to_string());
}
} }
// Process branch name // Process branch name
if let Some(output) = branch_result { if let Some(output) = branch_result
if output.status.success() { && output.status.success()
if let Ok(branch) = String::from_utf8(output.stdout) { && let Ok(branch) = String::from_utf8(output.stdout)
let branch = branch.trim(); {
if branch != "HEAD" { let branch = branch.trim();
git_info.branch = Some(branch.to_string()); if branch != "HEAD" {
} git_info.branch = Some(branch.to_string());
}
} }
} }
// Process repository URL // Process repository URL
if let Some(output) = url_result { if let Some(output) = url_result
if output.status.success() { && output.status.success()
if let Ok(url) = String::from_utf8(output.stdout) { && let Ok(url) = String::from_utf8(output.stdout)
git_info.repository_url = Some(url.trim().to_string()); {
} git_info.repository_url = Some(url.trim().to_string());
}
} }
Some(git_info) Some(git_info)

View File

@@ -12,20 +12,17 @@ pub fn is_known_safe_command(command: &[String]) -> bool {
// introduce side effects ( "&&", "||", ";", and "|" ). If every // introduce side effects ( "&&", "||", ";", and "|" ). If every
// individual command in the script is itself a known-safe command, then // the composite expression is considered safe.
// the composite expression is considered safe. // the composite expression is considered safe.
if let [bash, flag, script] = command { if let [bash, flag, script] = command
if bash == "bash" && flag == "-lc" { && bash == "bash"
if let Some(tree) = try_parse_bash(script) { && flag == "-lc"
if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) { && let Some(tree) = try_parse_bash(script)
if !all_commands.is_empty() && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
&& all_commands && !all_commands.is_empty()
.iter() && all_commands
.all(|cmd| is_safe_to_call_with_exec(cmd)) .iter()
{ .all(|cmd| is_safe_to_call_with_exec(cmd))
return true; {
} return true;
}
}
}
} }
false false

View File

@@ -167,10 +167,10 @@ impl ModelProviderInfo {
if let Some(env_headers) = &self.env_http_headers { if let Some(env_headers) = &self.env_http_headers {
for (header, env_var) in env_headers { for (header, env_var) in env_headers {
if let Ok(val) = std::env::var(env_var) { if let Ok(val) = std::env::var(env_var)
if !val.trim().is_empty() { && !val.trim().is_empty()
builder = builder.header(header, val); {
} builder = builder.header(header, val);
} }
} }
} }

View File

@@ -420,11 +420,11 @@ fn sanitize_json_schema(value: &mut JsonValue) {
} }
JsonValue::Object(map) => { JsonValue::Object(map) => {
// First, recursively sanitize known nested schema holders // First, recursively sanitize known nested schema holders
if let Some(props) = map.get_mut("properties") { if let Some(props) = map.get_mut("properties")
if let Some(props_map) = props.as_object_mut() { && let Some(props_map) = props.as_object_mut()
for (_k, v) in props_map.iter_mut() { {
sanitize_json_schema(v); for (_k, v) in props_map.iter_mut() {
} sanitize_json_schema(v);
} }
} }
if let Some(items) = map.get_mut("items") { if let Some(items) = map.get_mut("items") {
@@ -444,18 +444,18 @@ fn sanitize_json_schema(value: &mut JsonValue) {
.map(|s| s.to_string()); .map(|s| s.to_string());
// If type is an array (union), pick first supported; else leave to inference // If type is an array (union), pick first supported; else leave to inference
if ty.is_none() { if ty.is_none()
if let Some(JsonValue::Array(types)) = map.get("type") { && let Some(JsonValue::Array(types)) = map.get("type")
for t in types { {
if let Some(tt) = t.as_str() { for t in types {
if matches!( if let Some(tt) = t.as_str()
tt, && matches!(
"object" | "array" | "string" | "number" | "integer" | "boolean" tt,
) { "object" | "array" | "string" | "number" | "integer" | "boolean"
ty = Some(tt.to_string()); )
break; {
} ty = Some(tt.to_string());
} break;
} }
} }
} }

View File

@@ -1196,10 +1196,10 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
} }
// echo ... && ...rest => ...rest // echo ... && ...rest => ...rest
if let ParsedCommand::Unknown { cmd } = &commands[0] { if let ParsedCommand::Unknown { cmd } = &commands[0]
if shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo")) { && shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo"))
return Some(commands[1..].to_vec()); {
} return Some(commands[1..].to_vec());
} }
// cd foo && [any Test command] => [any Test command] // cd foo && [any Test command] => [any Test command]
@@ -1208,17 +1208,15 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("cd")) shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("cd"))
} }
_ => false, _ => false,
}) { }) && commands
if commands .iter()
.iter() .skip(idx + 1)
.skip(idx + 1) .any(|pc| matches!(pc, ParsedCommand::Test { .. }))
.any(|pc| matches!(pc, ParsedCommand::Test { .. })) {
{ let mut out = Vec::with_capacity(commands.len() - 1);
let mut out = Vec::with_capacity(commands.len() - 1); out.extend_from_slice(&commands[..idx]);
out.extend_from_slice(&commands[..idx]); out.extend_from_slice(&commands[idx + 1..]);
out.extend_from_slice(&commands[idx + 1..]); return Some(out);
return Some(out);
}
} }
// cmd || true => cmd // cmd || true => cmd
@@ -1564,127 +1562,124 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
if bash != "bash" || flag != "-lc" { if bash != "bash" || flag != "-lc" {
return None; return None;
} }
if let Some(tree) = try_parse_bash(script) { if let Some(tree) = try_parse_bash(script)
if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) { && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
if !all_commands.is_empty() { && !all_commands.is_empty()
let script_tokens = shlex_split(script) {
.unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()]); let script_tokens = shlex_split(script)
// Strip small formatting helpers (e.g., head/tail/awk/wc/etc) so we .unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()]);
// bias toward the primary command when pipelines are present. // Strip small formatting helpers (e.g., head/tail/awk/wc/etc) so we
// First, drop obvious small formatting helpers (e.g., wc/awk/etc). // bias toward the primary command when pipelines are present.
let had_multiple_commands = all_commands.len() > 1; // First, drop obvious small formatting helpers (e.g., wc/awk/etc).
// The bash AST walker yields commands in right-to-left order for let had_multiple_commands = all_commands.len() > 1;
// connector/pipeline sequences. Reverse to reflect actual execution order. // The bash AST walker yields commands in right-to-left order for
let mut filtered_commands = drop_small_formatting_commands(all_commands); // connector/pipeline sequences. Reverse to reflect actual execution order.
filtered_commands.reverse(); let mut filtered_commands = drop_small_formatting_commands(all_commands);
if filtered_commands.is_empty() { filtered_commands.reverse();
return Some(vec![ParsedCommand::Unknown { if filtered_commands.is_empty() {
cmd: script.clone(), return Some(vec![ParsedCommand::Unknown {
}]); cmd: script.clone(),
} }]);
let mut commands: Vec<ParsedCommand> = filtered_commands
.into_iter()
.map(|tokens| summarize_main_tokens(&tokens))
.collect();
if commands.len() > 1 {
commands.retain(|pc| !matches!(pc, ParsedCommand::Noop { .. }));
}
if commands.len() == 1 {
// If we reduced to a single command, attribute the full original script
// for clearer UX in file-reading and listing scenarios, or when there were
// no connectors in the original script. For search commands that came from
// a pipeline (e.g. `rg --files | sed -n`), keep only the primary command.
let had_connectors = had_multiple_commands
|| script_tokens
.iter()
.any(|t| t == "|" || t == "&&" || t == "||" || t == ";");
commands = commands
.into_iter()
.map(|pc| match pc {
ParsedCommand::Read { name, cmd, .. } => {
if had_connectors {
let has_pipe = script_tokens.iter().any(|t| t == "|");
let has_sed_n = script_tokens.windows(2).any(|w| {
w.first().map(|s| s.as_str()) == Some("sed")
&& w.get(1).map(|s| s.as_str()) == Some("-n")
});
if has_pipe && has_sed_n {
ParsedCommand::Read {
cmd: script.clone(),
name,
}
} else {
ParsedCommand::Read {
cmd: cmd.clone(),
name,
}
}
} else {
ParsedCommand::Read {
cmd: shlex_join(&script_tokens),
name,
}
}
}
ParsedCommand::ListFiles { path, cmd, .. } => {
if had_connectors {
ParsedCommand::ListFiles {
cmd: cmd.clone(),
path,
}
} else {
ParsedCommand::ListFiles {
cmd: shlex_join(&script_tokens),
path,
}
}
}
ParsedCommand::Search {
query, path, cmd, ..
} => {
if had_connectors {
ParsedCommand::Search {
cmd: cmd.clone(),
query,
path,
}
} else {
ParsedCommand::Search {
cmd: shlex_join(&script_tokens),
query,
path,
}
}
}
ParsedCommand::Format {
tool, targets, cmd, ..
} => ParsedCommand::Format {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Test { cmd, .. } => {
ParsedCommand::Test { cmd: cmd.clone() }
}
ParsedCommand::Lint {
tool, targets, cmd, ..
} => ParsedCommand::Lint {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Unknown { .. } => ParsedCommand::Unknown {
cmd: script.clone(),
},
ParsedCommand::Noop { .. } => ParsedCommand::Noop {
cmd: script.clone(),
},
})
.collect();
}
return Some(commands);
}
} }
let mut commands: Vec<ParsedCommand> = filtered_commands
.into_iter()
.map(|tokens| summarize_main_tokens(&tokens))
.collect();
if commands.len() > 1 {
commands.retain(|pc| !matches!(pc, ParsedCommand::Noop { .. }));
}
if commands.len() == 1 {
// If we reduced to a single command, attribute the full original script
// for clearer UX in file-reading and listing scenarios, or when there were
// no connectors in the original script. For search commands that came from
// a pipeline (e.g. `rg --files | sed -n`), keep only the primary command.
let had_connectors = had_multiple_commands
|| script_tokens
.iter()
.any(|t| t == "|" || t == "&&" || t == "||" || t == ";");
commands = commands
.into_iter()
.map(|pc| match pc {
ParsedCommand::Read { name, cmd, .. } => {
if had_connectors {
let has_pipe = script_tokens.iter().any(|t| t == "|");
let has_sed_n = script_tokens.windows(2).any(|w| {
w.first().map(|s| s.as_str()) == Some("sed")
&& w.get(1).map(|s| s.as_str()) == Some("-n")
});
if has_pipe && has_sed_n {
ParsedCommand::Read {
cmd: script.clone(),
name,
}
} else {
ParsedCommand::Read {
cmd: cmd.clone(),
name,
}
}
} else {
ParsedCommand::Read {
cmd: shlex_join(&script_tokens),
name,
}
}
}
ParsedCommand::ListFiles { path, cmd, .. } => {
if had_connectors {
ParsedCommand::ListFiles {
cmd: cmd.clone(),
path,
}
} else {
ParsedCommand::ListFiles {
cmd: shlex_join(&script_tokens),
path,
}
}
}
ParsedCommand::Search {
query, path, cmd, ..
} => {
if had_connectors {
ParsedCommand::Search {
cmd: cmd.clone(),
query,
path,
}
} else {
ParsedCommand::Search {
cmd: shlex_join(&script_tokens),
query,
path,
}
}
}
ParsedCommand::Format {
tool, targets, cmd, ..
} => ParsedCommand::Format {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Test { cmd, .. } => ParsedCommand::Test { cmd: cmd.clone() },
ParsedCommand::Lint {
tool, targets, cmd, ..
} => ParsedCommand::Lint {
cmd: cmd.clone(),
tool,
targets,
},
ParsedCommand::Unknown { .. } => ParsedCommand::Unknown {
cmd: script.clone(),
},
ParsedCommand::Noop { .. } => ParsedCommand::Noop {
cmd: script.clone(),
},
})
.collect();
}
return Some(commands);
} }
Some(vec![ParsedCommand::Unknown { Some(vec![ParsedCommand::Unknown {
cmd: script.clone(), cmd: script.clone(),

View File

@@ -231,10 +231,10 @@ fn is_write_patch_constrained_to_writable_paths(
if !is_path_writable(path) { if !is_path_writable(path) {
return false; return false;
} }
if let Some(dest) = move_path { if let Some(dest) = move_path
if !is_path_writable(dest) { && !is_path_writable(dest)
return false; {
} return false;
} }
} }
} }

View File

@@ -70,13 +70,13 @@ pub async fn default_user_shell() -> Shell {
} }
let stdout = String::from_utf8_lossy(&o.stdout); let stdout = String::from_utf8_lossy(&o.stdout);
for line in stdout.lines() { for line in stdout.lines() {
if let Some(shell_path) = line.strip_prefix("UserShell: ") { if let Some(shell_path) = line.strip_prefix("UserShell: ")
if shell_path.ends_with("/zsh") { && shell_path.ends_with("/zsh")
return Shell::Zsh(ZshShell { {
shell_path: shell_path.to_string(), return Shell::Zsh(ZshShell {
zshrc_path: format!("{home}/.zshrc"), shell_path: shell_path.to_string(),
}); zshrc_path: format!("{home}/.zshrc"),
} });
} }
} }

View File

@@ -297,13 +297,12 @@ async fn integration_creates_and_checks_session_file() {
Ok(v) => v, Ok(v) => v,
Err(_) => continue, Err(_) => continue,
}; };
if item.get("type").and_then(|t| t.as_str()) == Some("message") { if item.get("type").and_then(|t| t.as_str()) == Some("message")
if let Some(c) = item.get("content") { && let Some(c) = item.get("content")
if c.to_string().contains(&marker) { && c.to_string().contains(&marker)
matching_path = Some(path.to_path_buf()); {
break; matching_path = Some(path.to_path_buf());
} break;
}
} }
} }
} }
@@ -376,13 +375,12 @@ async fn integration_creates_and_checks_session_file() {
let Ok(item) = serde_json::from_str::<serde_json::Value>(line) else { let Ok(item) = serde_json::from_str::<serde_json::Value>(line) else {
continue; continue;
}; };
if item.get("type").and_then(|t| t.as_str()) == Some("message") { if item.get("type").and_then(|t| t.as_str()) == Some("message")
if let Some(c) = item.get("content") { && let Some(c) = item.get("content")
if c.to_string().contains(&marker) { && c.to_string().contains(&marker)
found_message = true; {
break; found_message = true;
} break;
}
} }
} }
assert!( assert!(

View File

@@ -29,9 +29,9 @@ pub(crate) fn handle_last_message(last_agent_message: Option<&str>, output_file:
} }
fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) { fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) {
if let Some(path) = last_message_path { if let Some(path) = last_message_path
if let Err(e) = std::fs::write(path, contents) { && let Err(e) = std::fs::write(path, contents)
eprintln!("Failed to write last message file {path:?}: {e}"); {
} eprintln!("Failed to write last message file {path:?}: {e}");
} }
} }

View File

@@ -214,7 +214,12 @@ system_path=[{fake_cp:?}]
// Only readable folders specified. // Only readable folders specified.
assert_eq!( assert_eq!(
checker.check(valid_exec.clone(), &cwd, &[root_path.clone()], &[]), checker.check(
valid_exec.clone(),
&cwd,
std::slice::from_ref(&root_path),
&[]
),
Err(WriteablePathNotInWriteableFolders { Err(WriteablePathNotInWriteableFolders {
file: dest_path.clone(), file: dest_path.clone(),
folders: vec![] folders: vec![]
@@ -226,8 +231,8 @@ system_path=[{fake_cp:?}]
checker.check( checker.check(
valid_exec.clone(), valid_exec.clone(),
&cwd, &cwd,
&[root_path.clone()], std::slice::from_ref(&root_path),
&[root_path.clone()] std::slice::from_ref(&root_path)
), ),
Ok(cp.clone()), Ok(cp.clone()),
); );
@@ -246,8 +251,8 @@ system_path=[{fake_cp:?}]
checker.check( checker.check(
valid_exec_call_folders_as_args, valid_exec_call_folders_as_args,
&cwd, &cwd,
&[root_path.clone()], std::slice::from_ref(&root_path),
&[root_path.clone()] std::slice::from_ref(&root_path)
), ),
Ok(cp.clone()), Ok(cp.clone()),
); );
@@ -269,8 +274,8 @@ system_path=[{fake_cp:?}]
checker.check( checker.check(
exec_with_parent_of_readable_folder, exec_with_parent_of_readable_folder,
&cwd, &cwd,
&[root_path.clone()], std::slice::from_ref(&root_path),
&[dest_path.clone()] std::slice::from_ref(&dest_path)
), ),
Err(ReadablePathNotInReadableFolders { Err(ReadablePathNotInReadableFolders {
file: root_path.parent().unwrap().to_path_buf(), file: root_path.parent().unwrap().to_path_buf(),

View File

@@ -56,16 +56,16 @@ impl Policy {
} }
for arg in args { for arg in args {
if let Some(regex) = &self.forbidden_substrings_pattern { if let Some(regex) = &self.forbidden_substrings_pattern
if regex.is_match(arg) { && regex.is_match(arg)
return Ok(MatchedExec::Forbidden { {
cause: Forbidden::Arg { return Ok(MatchedExec::Forbidden {
arg: arg.clone(), cause: Forbidden::Arg {
exec_call: exec_call.clone(), arg: arg.clone(),
}, exec_call: exec_call.clone(),
reason: format!("arg `{arg}` contains forbidden substring"), },
}); reason: format!("arg `{arg}` contains forbidden substring"),
} });
} }
} }

View File

@@ -3,12 +3,12 @@ use crate::error::Result;
pub fn parse_sed_command(sed_command: &str) -> Result<()> { pub fn parse_sed_command(sed_command: &str) -> Result<()> {
// For now, we parse only commands like `122,202p`. // For now, we parse only commands like `122,202p`.
if let Some(stripped) = sed_command.strip_suffix("p") { if let Some(stripped) = sed_command.strip_suffix("p")
if let Some((first, rest)) = stripped.split_once(",") { && let Some((first, rest)) = stripped.split_once(",")
if first.parse::<u64>().is_ok() && rest.parse::<u64>().is_ok() { && first.parse::<u64>().is_ok()
return Ok(()); && rest.parse::<u64>().is_ok()
} {
} return Ok(());
} }
Err(Error::SedCommandNotProvablySafe { Err(Error::SedCommandNotProvablySafe {

View File

@@ -228,11 +228,11 @@ pub fn run(
for &Reverse((score, ref line)) in best_list.binary_heap.iter() { for &Reverse((score, ref line)) in best_list.binary_heap.iter() {
if global_heap.len() < limit.get() { if global_heap.len() < limit.get() {
global_heap.push(Reverse((score, line.clone()))); global_heap.push(Reverse((score, line.clone())));
} else if let Some(min_element) = global_heap.peek() { } else if let Some(min_element) = global_heap.peek()
if score > min_element.0.0 { && score > min_element.0.0
global_heap.pop(); {
global_heap.push(Reverse((score, line.clone()))); global_heap.pop();
} global_heap.push(Reverse((score, line.clone())));
} }
} }
} }
@@ -320,11 +320,11 @@ impl BestMatchesList {
if self.binary_heap.len() < self.max_count { if self.binary_heap.len() < self.max_count {
self.binary_heap.push(Reverse((score, line.to_string()))); self.binary_heap.push(Reverse((score, line.to_string())));
} else if let Some(min_element) = self.binary_heap.peek() { } else if let Some(min_element) = self.binary_heap.peek()
if score > min_element.0.0 { && score > min_element.0.0
self.binary_heap.pop(); {
self.binary_heap.push(Reverse((score, line.to_string()))); self.binary_heap.pop();
} self.binary_heap.push(Reverse((score, line.to_string())));
} }
} }
} }

View File

@@ -364,10 +364,10 @@ async fn persist_tokens_async(
let codex_home = codex_home.to_path_buf(); let codex_home = codex_home.to_path_buf();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
let auth_file = get_auth_file(&codex_home); let auth_file = get_auth_file(&codex_home);
if let Some(parent) = auth_file.parent() { if let Some(parent) = auth_file.parent()
if !parent.exists() { && !parent.exists()
std::fs::create_dir_all(parent).map_err(io::Error::other)?; {
} std::fs::create_dir_all(parent).map_err(io::Error::other)?;
} }
let mut auth = read_or_default(&auth_file); let mut auth = read_or_default(&auth_file);

View File

@@ -166,9 +166,8 @@ impl OllamaClient {
yield PullEvent::Error(err_msg.to_string()); yield PullEvent::Error(err_msg.to_string());
return; return;
} }
if let Some(status) = value.get("status").and_then(|s| s.as_str()) { if let Some(status) = value.get("status").and_then(|s| s.as_str())
if status == "success" { yield PullEvent::Success; return; } && status == "success" { yield PullEvent::Success; return; }
}
} }
} }
} }

View File

@@ -48,18 +48,16 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
} }
// Format with Prettier by passing individual files (no shell globbing) // Format with Prettier by passing individual files (no shell globbing)
if let Some(prettier_bin) = prettier { if let Some(prettier_bin) = prettier
if !ts_files.is_empty() { && !ts_files.is_empty()
let status = Command::new(prettier_bin) {
.arg("--write") let status = Command::new(prettier_bin)
.args(ts_files.iter().map(|p| p.as_os_str())) .arg("--write")
.status() .args(ts_files.iter().map(|p| p.as_os_str()))
.with_context(|| { .status()
format!("Failed to invoke Prettier at {}", prettier_bin.display()) .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
})?; if !status.success() {
if !status.success() { return Err(anyhow!("Prettier failed with status {}", status));
return Err(anyhow!("Prettier failed with status {}", status));
}
} }
} }

View File

@@ -1,3 +1,3 @@
[toolchain] [toolchain]
channel = "1.88.0" channel = "1.89.0"
components = [ "clippy", "rustfmt", "rust-src"] components = [ "clippy", "rustfmt", "rust-src"]

View File

@@ -33,10 +33,10 @@ impl ApprovalModalView<'_> {
/// Advance to next request if the current one is finished. /// Advance to next request if the current one is finished.
fn maybe_advance(&mut self) { fn maybe_advance(&mut self) {
if self.current.is_complete() { if self.current.is_complete()
if let Some(req) = self.queue.pop() { && let Some(req) = self.queue.pop()
self.current = UserApprovalWidget::new(req, self.app_event_tx.clone()); {
} self.current = UserApprovalWidget::new(req, self.app_event_tx.clone());
} }
} }
} }

View File

@@ -1575,53 +1575,53 @@ mod tests {
} }
14 => { 14 => {
// Try inserting inside an existing element (should clamp to boundary) // Try inserting inside an existing element (should clamp to boundary)
if let Some(payload) = elem_texts.choose(&mut rng).cloned() { if let Some(payload) = elem_texts.choose(&mut rng).cloned()
if let Some(start) = ta.text().find(&payload) { && let Some(start) = ta.text().find(&payload)
let end = start + payload.len(); {
if end - start > 2 { let end = start + payload.len();
let pos = rng.random_range(start + 1..end - 1); if end - start > 2 {
let ins = rand_grapheme(&mut rng); let pos = rng.random_range(start + 1..end - 1);
ta.insert_str_at(pos, &ins); let ins = rand_grapheme(&mut rng);
} ta.insert_str_at(pos, &ins);
} }
} }
} }
15 => { 15 => {
// Replace a range that intersects an element -> whole element should be replaced // Replace a range that intersects an element -> whole element should be replaced
if let Some(payload) = elem_texts.choose(&mut rng).cloned() { if let Some(payload) = elem_texts.choose(&mut rng).cloned()
if let Some(start) = ta.text().find(&payload) { && let Some(start) = ta.text().find(&payload)
let end = start + payload.len(); {
// Create an intersecting range [start-δ, end-δ2) let end = start + payload.len();
let mut s = start.saturating_sub(rng.random_range(0..=2)); // Create an intersecting range [start-δ, end-δ2)
let mut e = (end + rng.random_range(0..=2)).min(ta.text().len()); let mut s = start.saturating_sub(rng.random_range(0..=2));
// Align to char boundaries to satisfy String::replace_range contract let mut e = (end + rng.random_range(0..=2)).min(ta.text().len());
let txt = ta.text(); // Align to char boundaries to satisfy String::replace_range contract
while s > 0 && !txt.is_char_boundary(s) { let txt = ta.text();
s -= 1; while s > 0 && !txt.is_char_boundary(s) {
} s -= 1;
while e < txt.len() && !txt.is_char_boundary(e) { }
e += 1; while e < txt.len() && !txt.is_char_boundary(e) {
} e += 1;
if s < e { }
// Small replacement text if s < e {
let mut srep = String::new(); // Small replacement text
for _ in 0..rng.random_range(0..=2) { let mut srep = String::new();
srep.push_str(&rand_grapheme(&mut rng)); for _ in 0..rng.random_range(0..=2) {
} srep.push_str(&rand_grapheme(&mut rng));
ta.replace_range(s..e, &srep);
} }
ta.replace_range(s..e, &srep);
} }
} }
} }
16 => { 16 => {
// Try setting the cursor to a position inside an element; it should clamp out // Try setting the cursor to a position inside an element; it should clamp out
if let Some(payload) = elem_texts.choose(&mut rng).cloned() { if let Some(payload) = elem_texts.choose(&mut rng).cloned()
if let Some(start) = ta.text().find(&payload) { && let Some(start) = ta.text().find(&payload)
let end = start + payload.len(); {
if end - start > 2 { let end = start + payload.len();
let pos = rng.random_range(start + 1..end - 1); if end - start > 2 {
ta.set_cursor(pos); let pos = rng.random_range(start + 1..end - 1);
} ta.set_cursor(pos);
} }
} }
} }

View File

@@ -339,18 +339,18 @@ async fn binary_size_transcript_matches_ideal_fixture() {
} }
} }
"app_event" => { "app_event" => {
if let Some(variant) = v.get("variant").and_then(|s| s.as_str()) { if let Some(variant) = v.get("variant").and_then(|s| s.as_str())
if variant == "CommitTick" { && variant == "CommitTick"
chat.on_commit_tick(); {
while let Ok(app_ev) = rx.try_recv() { chat.on_commit_tick();
if let AppEvent::InsertHistory(lines) = app_ev { while let Ok(app_ev) = rx.try_recv() {
transcript.push_str(&lines_to_single_string(&lines)); if let AppEvent::InsertHistory(lines) = app_ev {
crate::insert_history::insert_history_lines_to_writer( transcript.push_str(&lines_to_single_string(&lines));
&mut terminal, crate::insert_history::insert_history_lines_to_writer(
&mut ansi, &mut terminal,
lines, &mut ansi,
); lines,
} );
} }
} }
} }

View File

@@ -264,10 +264,10 @@ where
#[allow(clippy::print_stderr)] #[allow(clippy::print_stderr)]
fn drop(&mut self) { fn drop(&mut self) {
// Attempt to restore the cursor state // Attempt to restore the cursor state
if self.hidden_cursor { if self.hidden_cursor
if let Err(err) = self.show_cursor() { && let Err(err) = self.show_cursor()
eprintln!("Failed to show the cursor: {err}"); {
} eprintln!("Failed to show the cursor: {err}");
} }
} }
} }
@@ -309,7 +309,7 @@ where
} }
/// Get a Frame object which provides a consistent view into the terminal state for rendering. /// Get a Frame object which provides a consistent view into the terminal state for rendering.
pub fn get_frame(&mut self) -> Frame { pub fn get_frame(&mut self) -> Frame<'_> {
let count = self.frame_count; let count = self.frame_count;
Frame { Frame {
cursor_position: None, cursor_position: None,

View File

@@ -33,10 +33,10 @@ where
return None; return None;
} }
if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from) { if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from)
if let Ok(rel) = path.strip_prefix(&home_dir) { && let Ok(rel) = path.strip_prefix(&home_dir)
return Some(rel.to_path_buf()); {
} return Some(rel.to_path_buf());
} }
None None

View File

@@ -94,13 +94,13 @@ impl FileSearchManager {
// If there is an in-flight search that is definitely obsolete, // If there is an in-flight search that is definitely obsolete,
// cancel it now. // cancel it now.
if let Some(active_search) = &st.active_search { if let Some(active_search) = &st.active_search
if !query.starts_with(&active_search.query) { && !query.starts_with(&active_search.query)
active_search {
.cancellation_token active_search
.store(true, Ordering::Relaxed); .cancellation_token
st.active_search = None; .store(true, Ordering::Relaxed);
} st.active_search = None;
} }
// Schedule a search to run after debounce. // Schedule a search to run after debounce.
@@ -187,10 +187,10 @@ impl FileSearchManager {
{ {
#[expect(clippy::unwrap_used)] #[expect(clippy::unwrap_used)]
let mut st = search_state.lock().unwrap(); let mut st = search_state.lock().unwrap();
if let Some(active_search) = &st.active_search { if let Some(active_search) = &st.active_search
if Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token) { && Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token)
st.active_search = None; {
} st.active_search = None;
} }
} }
}); });

View File

@@ -541,33 +541,33 @@ pub(crate) fn new_status_output(
// 👤 Account (only if ChatGPT tokens exist), shown under the first block // 👤 Account (only if ChatGPT tokens exist), shown under the first block
let auth_file = get_auth_file(&config.codex_home); let auth_file = get_auth_file(&config.codex_home);
if let Ok(auth) = try_read_auth_json(&auth_file) { if let Ok(auth) = try_read_auth_json(&auth_file)
if let Some(tokens) = auth.tokens.clone() { && let Some(tokens) = auth.tokens.clone()
lines.push(Line::from(vec!["👤 ".into(), "Account".bold()])); {
lines.push(Line::from(" • Signed in with ChatGPT")); lines.push(Line::from(vec!["👤 ".into(), "Account".bold()]));
lines.push(Line::from(" • Signed in with ChatGPT"));
let info = tokens.id_token; let info = tokens.id_token;
if let Some(email) = &info.email { if let Some(email) = &info.email {
lines.push(Line::from(vec![" • Login: ".into(), email.clone().into()])); lines.push(Line::from(vec![" • Login: ".into(), email.clone().into()]));
}
match auth.openai_api_key.as_deref() {
Some(key) if !key.is_empty() => {
lines.push(Line::from(
" • Using API key. Run codex login to use ChatGPT plan",
));
}
_ => {
let plan_text = info
.get_chatgpt_plan_type()
.map(|s| title_case(&s))
.unwrap_or_else(|| "Unknown".to_string());
lines.push(Line::from(vec![" • Plan: ".into(), plan_text.into()]));
}
}
lines.push(Line::from(""));
} }
match auth.openai_api_key.as_deref() {
Some(key) if !key.is_empty() => {
lines.push(Line::from(
" • Using API key. Run codex login to use ChatGPT plan",
));
}
_ => {
let plan_text = info
.get_chatgpt_plan_type()
.map(|s| title_case(&s))
.unwrap_or_else(|| "Unknown".to_string());
lines.push(Line::from(vec![" • Plan: ".into(), plan_text.into()]));
}
}
lines.push(Line::from(""));
} }
// 🧠 Model // 🧠 Model
@@ -612,10 +612,10 @@ pub(crate) fn new_status_output(
" • Input: ".into(), " • Input: ".into(),
usage.non_cached_input().to_string().into(), usage.non_cached_input().to_string().into(),
]; ];
if let Some(cached) = usage.cached_input_tokens { if let Some(cached) = usage.cached_input_tokens
if cached > 0 { && cached > 0
input_line_spans.push(format!(" (+ {cached} cached)").into()); {
} input_line_spans.push(format!(" (+ {cached} cached)").into());
} }
lines.push(Line::from(input_line_spans)); lines.push(Line::from(input_line_spans));
// Output: <output> // Output: <output>
@@ -688,16 +688,15 @@ pub(crate) fn new_mcp_tools_output(
])); ]));
} }
if let Some(env) = cfg.env.as_ref() { if let Some(env) = cfg.env.as_ref()
if !env.is_empty() { && !env.is_empty()
let mut env_pairs: Vec<String> = {
env.iter().map(|(k, v)| format!("{k}={v}")).collect(); let mut env_pairs: Vec<String> = env.iter().map(|(k, v)| format!("{k}={v}")).collect();
env_pairs.sort(); env_pairs.sort();
lines.push(Line::from(vec![ lines.push(Line::from(vec![
" • Env: ".into(), " • Env: ".into(),
env_pairs.join(" ").into(), env_pairs.join(" ").into(),
])); ]));
}
} }
if names.is_empty() { if names.is_empty() {

View File

@@ -123,20 +123,20 @@ impl AuthModeWidget {
// If the user is already authenticated but the method differs from their // If the user is already authenticated but the method differs from their
// preferred auth method, show a brief explanation. // preferred auth method, show a brief explanation.
if let LoginStatus::AuthMode(current) = self.login_status { if let LoginStatus::AuthMode(current) = self.login_status
if current != self.preferred_auth_method { && current != self.preferred_auth_method
let to_label = |mode: AuthMode| match mode { {
AuthMode::ApiKey => "API key", let to_label = |mode: AuthMode| match mode {
AuthMode::ChatGPT => "ChatGPT", AuthMode::ApiKey => "API key",
}; AuthMode::ChatGPT => "ChatGPT",
let msg = format!( };
" Youre currently using {} while your preferred method is {}.", let msg = format!(
to_label(current), " Youre currently using {} while your preferred method is {}.",
to_label(self.preferred_auth_method) to_label(current),
); to_label(self.preferred_auth_method)
lines.push(Line::from(msg).style(Style::default())); );
lines.push(Line::from("")); lines.push(Line::from(msg).style(Style::default()));
} lines.push(Line::from(""));
} }
let create_mode_item = |idx: usize, let create_mode_item = |idx: usize,
@@ -222,15 +222,15 @@ impl AuthModeWidget {
spans.extend(shimmer_spans("Finish signing in via your browser")); spans.extend(shimmer_spans("Finish signing in via your browser"));
let mut lines = vec![Line::from(spans), Line::from("")]; let mut lines = vec![Line::from(spans), Line::from("")];
if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state { if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state
if !state.auth_url.is_empty() { && !state.auth_url.is_empty()
lines.push(Line::from(" If the link doesn't open automatically, open the following link to authenticate:")); {
lines.push(Line::from(vec![ lines.push(Line::from(" If the link doesn't open automatically, open the following link to authenticate:"));
Span::raw(" "), lines.push(Line::from(vec![
state.auth_url.as_str().cyan().underlined(), Span::raw(" "),
])); state.auth_url.as_str().cyan().underlined(),
lines.push(Line::from("")); ]));
} lines.push(Line::from(""));
} }
lines.push( lines.push(

View File

@@ -96,26 +96,26 @@ impl StreamController {
/// Begin a stream, flushing previously completed lines from any other /// Begin a stream, flushing previously completed lines from any other
/// active stream to maintain ordering. /// active stream to maintain ordering.
pub(crate) fn begin(&mut self, kind: StreamKind, sink: &impl HistorySink) { pub(crate) fn begin(&mut self, kind: StreamKind, sink: &impl HistorySink) {
if let Some(current) = self.current_stream { if let Some(current) = self.current_stream
if current != kind { && current != kind
// Synchronously flush completed lines from previous stream. {
let cfg = self.config.clone(); // Synchronously flush completed lines from previous stream.
let prev_state = self.state_mut(current); let cfg = self.config.clone();
let newly_completed = prev_state.collector.commit_complete_lines(&cfg); let prev_state = self.state_mut(current);
if !newly_completed.is_empty() { let newly_completed = prev_state.collector.commit_complete_lines(&cfg);
prev_state.enqueue(newly_completed); if !newly_completed.is_empty() {
} prev_state.enqueue(newly_completed);
let step = prev_state.drain_all();
if !step.history.is_empty() {
let mut lines: Lines = Vec::new();
self.emit_header_if_needed(current, &mut lines);
lines.extend(step.history);
// Ensure at most one trailing blank after the flushed block.
Self::ensure_single_trailing_blank(&mut lines);
sink.insert_history(lines);
}
self.current_stream = None;
} }
let step = prev_state.drain_all();
if !step.history.is_empty() {
let mut lines: Lines = Vec::new();
self.emit_header_if_needed(current, &mut lines);
lines.extend(step.history);
// Ensure at most one trailing blank after the flushed block.
Self::ensure_single_trailing_blank(&mut lines);
sink.insert_history(lines);
}
self.current_stream = None;
} }
if self.current_stream != Some(kind) { if self.current_stream != Some(kind) {

View File

@@ -53,12 +53,12 @@ pub(crate) fn format_json_compact(text: &str) -> Option<String> {
} }
' ' | '\t' if !in_string => { ' ' | '\t' if !in_string => {
// Add a space after : and , but only when not in a string // Add a space after : and , but only when not in a string
if let Some(&next_ch) = chars.peek() { if let Some(&next_ch) = chars.peek()
if let Some(last_ch) = result.chars().last() { && let Some(last_ch) = result.chars().last()
if (last_ch == ':' || last_ch == ',') && !matches!(next_ch, '}' | ']') { && (last_ch == ':' || last_ch == ',')
result.push(' '); && !matches!(next_ch, '}' | ']')
} {
} result.push(' ');
} }
} }
_ => { _ => {

View File

@@ -112,12 +112,11 @@ fn long_token_wraps() {
let mut count_a = 0usize; let mut count_a = 0usize;
for row in 0..6 { for row in 0..6 {
for col in 0..20 { for col in 0..20 {
if let Some(cell) = screen.cell(row, col) { if let Some(cell) = screen.cell(row, col)
if let Some(ch) = cell.contents().chars().next() { && let Some(ch) = cell.contents().chars().next()
if ch == 'A' { && ch == 'A'
count_a += 1; {
} count_a += 1;
}
} }
} }
} }