chore: upgrade to Rust 1.89 (#2465)
Codex created this PR from the following prompt:

> upgrade this entire repo to Rust 1.89. Note that this requires updating codex-rs/rust-toolchain.toml as well as the workflows in .github/. Make sure that things are "clippy clean" as this change will likely uncover new Clippy errors. `just fmt` and `cargo clippy --tests` are sufficient to check for correctness

Note this modifies a lot of lines because it folds nested `if` statements using `&&`.

---

Stack created with [Sapling](https://sapling-scm.com). Best reviewed with [ReviewStack](https://reviewstack.dev/openai/codex/pull/2465).

* #2467
* __->__ #2465
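For readers skimming the diff below: most of the churn comes from collapsing a nested `if`/`if let` into a single condition chained with `&&` (a let-chain), presumably to satisfy the Clippy that ships with the 1.89 toolchain. A minimal sketch of the before/after pattern (illustrative only; `parent_dir_*` are made-up helpers, and the chained form assumes an edition where let-chains are stable, such as the 2024 edition):

```rust
use std::path::Path;

// Before: nested conditionals, two levels of indentation.
fn parent_dir_nested(path: &Path) -> Option<&Path> {
    if let Some(parent) = path.parent() {
        if !parent.as_os_str().is_empty() {
            return Some(parent);
        }
    }
    None
}

// After: one `if` with the conditions joined by `&&`.
fn parent_dir_chained(path: &Path) -> Option<&Path> {
    if let Some(parent) = path.parent()
        && !parent.as_os_str().is_empty()
    {
        return Some(parent);
    }
    None
}
```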
.github/workflows/codex.yml (vendored, 2 changed lines)

@@ -39,7 +39,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v5

-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           targets: x86_64-unknown-linux-gnu
           components: clippy
.github/workflows/rust-ci.yml (vendored, 4 changed lines)

@@ -57,7 +57,7 @@ jobs:
     working-directory: codex-rs
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           components: rustfmt
       - name: cargo fmt

@@ -112,7 +112,7 @@ jobs:

     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           targets: ${{ matrix.target }}
           components: clippy
.github/workflows/rust-release.yml (vendored, 2 changed lines)

@@ -75,7 +75,7 @@ jobs:

     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           targets: ${{ matrix.target }}

@@ -415,13 +415,13 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result<AffectedPaths> {
     for hunk in hunks {
         match hunk {
             Hunk::AddFile { path, contents } => {
-                if let Some(parent) = path.parent() {
-                    if !parent.as_os_str().is_empty() {
+                if let Some(parent) = path.parent()
+                    && !parent.as_os_str().is_empty()
+                {
                     std::fs::create_dir_all(parent).with_context(|| {
                         format!("Failed to create parent directories for {}", path.display())
                     })?;
                 }
-                }
                 std::fs::write(path, contents)
                     .with_context(|| format!("Failed to write file {}", path.display()))?;
                 added.push(path.clone());

@@ -439,16 +439,13 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result<AffectedPaths> {
                 let AppliedPatch { new_contents, .. } =
                     derive_new_contents_from_chunks(path, chunks)?;
                 if let Some(dest) = move_path {
-                    if let Some(parent) = dest.parent() {
-                        if !parent.as_os_str().is_empty() {
+                    if let Some(parent) = dest.parent()
+                        && !parent.as_os_str().is_empty()
+                    {
                         std::fs::create_dir_all(parent).with_context(|| {
-                            format!(
-                                "Failed to create parent directories for {}",
-                                dest.display()
-                            )
+                            format!("Failed to create parent directories for {}", dest.display())
                         })?;
                     }
-                    }
                     std::fs::write(dest, new_contents)
                         .with_context(|| format!("Failed to write file {}", dest.display()))?;
                     std::fs::remove_file(path)

@@ -529,9 +526,12 @@ fn compute_replacements(
         // If a chunk has a `change_context`, we use seek_sequence to find it, then
         // adjust our `line_index` to continue from there.
         if let Some(ctx_line) = &chunk.change_context {
-            if let Some(idx) =
-                seek_sequence::seek_sequence(original_lines, &[ctx_line.clone()], line_index, false)
-            {
+            if let Some(idx) = seek_sequence::seek_sequence(
+                original_lines,
+                std::slice::from_ref(ctx_line),
+                line_index,
+                false,
+            ) {
                 line_index = idx + 1;
             } else {
                 return Err(ApplyPatchError::ComputeReplacements(format!(
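A second recurring pattern in this commit (here in `compute_replacements`, and again in the sandbox checker tests further down) is replacing a temporary one-element array of clones, `&[x.clone()]`, with `std::slice::from_ref(&x)`. A minimal sketch of why the two are interchangeable (illustrative only; `demo` is a made-up function, not part of the diff):

```rust
fn demo(ctx_line: &String) {
    // Allocates a clone just to build a one-element slice.
    let cloned: &[String] = &[ctx_line.clone()];
    // Borrows the existing value as a one-element slice; no clone needed.
    let borrowed: &[String] = std::slice::from_ref(ctx_line);
    assert_eq!(cloned, borrowed);
}
```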
@@ -89,11 +89,11 @@ const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";
 /// Security: Do not allow `.env` files to create or modify any variables
 /// with names starting with `CODEX_`.
 fn load_dotenv() {
-    if let Ok(codex_home) = codex_core::config::find_codex_home() {
-        if let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env")) {
+    if let Ok(codex_home) = codex_core::config::find_codex_home()
+        && let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env"))
+    {
         set_filtered(iter);
     }
-    }

     if let Ok(iter) = dotenvy::dotenv_iter() {
         set_filtered(iter);
@@ -66,13 +66,13 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
         Ok(api_key) => {
             eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));

-            if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) {
-                if env_api_key == api_key {
+            if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR)
+                && env_api_key == api_key
+            {
                 eprintln!(
                     " API loaded from OPENAI_API_KEY environment variable or .env file"
                 );
             }
-            }
             std::process::exit(0);
         }
         Err(e) => {
@@ -290,14 +290,13 @@ async fn process_chat_sse<S>(
                 .get("delta")
                 .and_then(|d| d.get("content"))
                 .and_then(|c| c.as_str())
+                && !content.is_empty()
             {
-                if !content.is_empty() {
                 assistant_text.push_str(content);
                 let _ = tx_event
                     .send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
                     .await;
             }
-            }

             // Forward any reasoning/thinking deltas if present.
             // Some providers stream `reasoning` as a plain string while others

@@ -333,8 +332,8 @@ async fn process_chat_sse<S>(
                 .get("delta")
                 .and_then(|d| d.get("tool_calls"))
                 .and_then(|tc| tc.as_array())
+                && let Some(tool_call) = tool_calls.first()
             {
-                if let Some(tool_call) = tool_calls.first() {
                 // Mark that we have an active function call in progress.
                 fn_call_state.active = true;

@@ -349,14 +348,12 @@ async fn process_chat_sse<S>(
                 fn_call_state.name.get_or_insert_with(|| name.to_string());
             }

-            if let Some(args_fragment) =
-                function.get("arguments").and_then(|a| a.as_str())
+            if let Some(args_fragment) = function.get("arguments").and_then(|a| a.as_str())
             {
                 fn_call_state.arguments.push_str(args_fragment);
             }
         }
     }
-    }

     // Emit end-of-turn when finish_reason signals completion.
     if let Some(finish_reason) = choice.get("finish_reason").and_then(|v| v.as_str()) {

@@ -491,16 +488,15 @@ where
                 // Only use the final assistant message if we have not
                 // seen any deltas; otherwise, deltas already built the
                 // cumulative text and this would duplicate it.
-                if this.cumulative.is_empty() {
-                    if let crate::models::ResponseItem::Message { content, .. } = &item {
-                        if let Some(text) = content.iter().find_map(|c| match c {
+                if this.cumulative.is_empty()
+                    && let crate::models::ResponseItem::Message { content, .. } = &item
+                    && let Some(text) = content.iter().find_map(|c| match c {
                         crate::models::ContentItem::OutputText { text } => Some(text),
                         _ => None,
-                        }) {
+                    })
+                {
                     this.cumulative.push_str(text);
                 }
-                    }
-                }

                 // Swallow assistant message here; emit on Completed.
                 continue;
@@ -544,12 +544,12 @@ impl Session {

     pub fn remove_task(&self, sub_id: &str) {
         let mut state = self.state.lock_unchecked();
-        if let Some(task) = &state.current_task {
-            if task.sub_id == sub_id {
+        if let Some(task) = &state.current_task
+            && task.sub_id == sub_id
+        {
             state.current_task.take();
         }
-        }
     }

     /// Sends the given event to the client and swallows the send event, if
     /// any, logging it as an error.

@@ -1239,8 +1239,9 @@ async fn submission_loop(
                 // Gracefully flush and shutdown rollout recorder on session end so tests
                 // that inspect the rollout file do not race with the background writer.
                 let recorder_opt = sess.rollout.lock_unchecked().take();
-                if let Some(rec) = recorder_opt {
-                    if let Err(e) = rec.shutdown().await {
+                if let Some(rec) = recorder_opt
+                    && let Err(e) = rec.shutdown().await
+                {
                     warn!("failed to shutdown rollout recorder: {e}");
                     let event = Event {
                         id: sub.id.clone(),

@@ -1252,7 +1253,6 @@ async fn submission_loop(
                         warn!("failed to send error message: {e:?}");
                     }
                 }
-                }

                 let event = Event {
                     id: sub.id.clone(),
@@ -759,11 +759,11 @@ fn default_model() -> String {
 pub fn find_codex_home() -> std::io::Result<PathBuf> {
     // Honor the `CODEX_HOME` environment variable when it is set to allow users
     // (and tests) to override the default location.
-    if let Ok(val) = std::env::var("CODEX_HOME") {
-        if !val.is_empty() {
+    if let Ok(val) = std::env::var("CODEX_HOME")
+        && !val.is_empty()
+    {
         return PathBuf::from(val).canonicalize();
     }
-    }

     let mut p = home_dir().ok_or_else(|| {
         std::io::Error::new(
@@ -51,34 +51,31 @@ pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
     };

     // Process commit hash
-    if let Some(output) = commit_result {
-        if output.status.success() {
-            if let Ok(hash) = String::from_utf8(output.stdout) {
+    if let Some(output) = commit_result
+        && output.status.success()
+        && let Ok(hash) = String::from_utf8(output.stdout)
+    {
         git_info.commit_hash = Some(hash.trim().to_string());
     }
-        }
-    }

     // Process branch name
-    if let Some(output) = branch_result {
-        if output.status.success() {
-            if let Ok(branch) = String::from_utf8(output.stdout) {
+    if let Some(output) = branch_result
+        && output.status.success()
+        && let Ok(branch) = String::from_utf8(output.stdout)
+    {
         let branch = branch.trim();
         if branch != "HEAD" {
             git_info.branch = Some(branch.to_string());
         }
     }
-        }
-    }

     // Process repository URL
-    if let Some(output) = url_result {
-        if output.status.success() {
-            if let Ok(url) = String::from_utf8(output.stdout) {
+    if let Some(output) = url_result
+        && output.status.success()
+        && let Ok(url) = String::from_utf8(output.stdout)
+    {
         git_info.repository_url = Some(url.trim().to_string());
     }
-        }
-    }

     Some(git_info)
 }
@@ -12,21 +12,18 @@ pub fn is_known_safe_command(command: &[String]) -> bool {
     // introduce side effects ( "&&", "||", ";", and "|" ). If every
     // individual command in the script is itself a known‑safe command, then
     // the composite expression is considered safe.
-    if let [bash, flag, script] = command {
-        if bash == "bash" && flag == "-lc" {
-            if let Some(tree) = try_parse_bash(script) {
-                if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
-                    if !all_commands.is_empty()
+    if let [bash, flag, script] = command
+        && bash == "bash"
+        && flag == "-lc"
+        && let Some(tree) = try_parse_bash(script)
+        && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
+        && !all_commands.is_empty()
         && all_commands
             .iter()
             .all(|cmd| is_safe_to_call_with_exec(cmd))
     {
         return true;
     }
-                }
-            }
-        }
-    }

     false
 }
@@ -167,13 +167,13 @@ impl ModelProviderInfo {

         if let Some(env_headers) = &self.env_http_headers {
             for (header, env_var) in env_headers {
-                if let Ok(val) = std::env::var(env_var) {
-                    if !val.trim().is_empty() {
+                if let Ok(val) = std::env::var(env_var)
+                    && !val.trim().is_empty()
+                {
                     builder = builder.header(header, val);
                 }
-                }
             }
         }
         builder
     }

@@ -420,13 +420,13 @@ fn sanitize_json_schema(value: &mut JsonValue) {
         }
         JsonValue::Object(map) => {
             // First, recursively sanitize known nested schema holders
-            if let Some(props) = map.get_mut("properties") {
-                if let Some(props_map) = props.as_object_mut() {
+            if let Some(props) = map.get_mut("properties")
+                && let Some(props_map) = props.as_object_mut()
+            {
                 for (_k, v) in props_map.iter_mut() {
                     sanitize_json_schema(v);
                 }
             }
-            }
             if let Some(items) = map.get_mut("items") {
                 sanitize_json_schema(items);
             }

@@ -444,21 +444,21 @@ fn sanitize_json_schema(value: &mut JsonValue) {
                 .map(|s| s.to_string());

             // If type is an array (union), pick first supported; else leave to inference
-            if ty.is_none() {
-                if let Some(JsonValue::Array(types)) = map.get("type") {
+            if ty.is_none()
+                && let Some(JsonValue::Array(types)) = map.get("type")
+            {
                 for t in types {
-                    if let Some(tt) = t.as_str() {
-                        if matches!(
+                    if let Some(tt) = t.as_str()
+                        && matches!(
                             tt,
                             "object" | "array" | "string" | "number" | "integer" | "boolean"
-                        ) {
+                        )
+                    {
                         ty = Some(tt.to_string());
                         break;
                     }
                 }
-                    }
-                }
             }

             // Infer type if still missing
             if ty.is_none() {
@@ -1196,11 +1196,11 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
     }

     // echo ... && ...rest => ...rest
-    if let ParsedCommand::Unknown { cmd } = &commands[0] {
-        if shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo")) {
+    if let ParsedCommand::Unknown { cmd } = &commands[0]
+        && shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo"))
+    {
         return Some(commands[1..].to_vec());
     }
-    }

     // cd foo && [any Test command] => [any Test command]
     if let Some(idx) = commands.iter().position(|pc| match pc {

@@ -1208,8 +1208,7 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
             shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("cd"))
         }
         _ => false,
-    }) {
-        if commands
+    }) && commands
         .iter()
         .skip(idx + 1)
         .any(|pc| matches!(pc, ParsedCommand::Test { .. }))

@@ -1219,7 +1218,6 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
         out.extend_from_slice(&commands[idx + 1..]);
         return Some(out);
     }
-    }

     // cmd || true => cmd
     if let Some(idx) = commands.iter().position(|pc| match pc {

@@ -1564,9 +1562,10 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
     if bash != "bash" || flag != "-lc" {
         return None;
     }
-    if let Some(tree) = try_parse_bash(script) {
-        if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
-            if !all_commands.is_empty() {
+    if let Some(tree) = try_parse_bash(script)
+        && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
+        && !all_commands.is_empty()
+    {
         let script_tokens = shlex_split(script)
             .unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()]);
         // Strip small formatting helpers (e.g., head/tail/awk/wc/etc) so we

@@ -1663,9 +1662,7 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
                     tool,
                     targets,
                 },
-                ParsedCommand::Test { cmd, .. } => {
-                    ParsedCommand::Test { cmd: cmd.clone() }
-                }
+                ParsedCommand::Test { cmd, .. } => ParsedCommand::Test { cmd: cmd.clone() },
                 ParsedCommand::Lint {
                     tool, targets, cmd, ..
                 } => ParsedCommand::Lint {

@@ -1684,8 +1681,6 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
             }
             return Some(commands);
         }
-        }
-    }

     Some(vec![ParsedCommand::Unknown {
         cmd: script.clone(),
     }])
@@ -231,14 +231,14 @@ fn is_write_patch_constrained_to_writable_paths(
                 if !is_path_writable(path) {
                     return false;
                 }
-                if let Some(dest) = move_path {
-                    if !is_path_writable(dest) {
+                if let Some(dest) = move_path
+                    && !is_path_writable(dest)
+                {
                     return false;
                 }
-                }
             }
         }
     }

     true
 }
@@ -70,15 +70,15 @@ pub async fn default_user_shell() -> Shell {
         }
         let stdout = String::from_utf8_lossy(&o.stdout);
         for line in stdout.lines() {
-            if let Some(shell_path) = line.strip_prefix("UserShell: ") {
-                if shell_path.ends_with("/zsh") {
+            if let Some(shell_path) = line.strip_prefix("UserShell: ")
+                && shell_path.ends_with("/zsh")
+            {
                 return Shell::Zsh(ZshShell {
                     shell_path: shell_path.to_string(),
                     zshrc_path: format!("{home}/.zshrc"),
                 });
             }
-            }
         }
     }

     Shell::Unknown
 }
@@ -297,16 +297,15 @@ async fn integration_creates_and_checks_session_file() {
             Ok(v) => v,
             Err(_) => continue,
         };
-        if item.get("type").and_then(|t| t.as_str()) == Some("message") {
-            if let Some(c) = item.get("content") {
-                if c.to_string().contains(&marker) {
+        if item.get("type").and_then(|t| t.as_str()) == Some("message")
+            && let Some(c) = item.get("content")
+            && c.to_string().contains(&marker)
+        {
             matching_path = Some(path.to_path_buf());
             break;
         }
-            }
-        }
     }
     if matching_path.is_none() {
         std::thread::sleep(Duration::from_millis(50));
     }

@@ -376,15 +375,14 @@ async fn integration_creates_and_checks_session_file() {
         let Ok(item) = serde_json::from_str::<serde_json::Value>(line) else {
             continue;
         };
-        if item.get("type").and_then(|t| t.as_str()) == Some("message") {
-            if let Some(c) = item.get("content") {
-                if c.to_string().contains(&marker) {
+        if item.get("type").and_then(|t| t.as_str()) == Some("message")
+            && let Some(c) = item.get("content")
+            && c.to_string().contains(&marker)
+        {
             found_message = true;
             break;
         }
-            }
-        }
     }
     assert!(
         found_message,
         "No message found in session file containing the marker"
@@ -29,9 +29,9 @@ pub(crate) fn handle_last_message(last_agent_message: Option<&str>, output_file:
 }

 fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) {
-    if let Some(path) = last_message_path {
-        if let Err(e) = std::fs::write(path, contents) {
+    if let Some(path) = last_message_path
+        && let Err(e) = std::fs::write(path, contents)
+    {
         eprintln!("Failed to write last message file {path:?}: {e}");
     }
-    }
 }
@@ -214,7 +214,12 @@ system_path=[{fake_cp:?}]

     // Only readable folders specified.
     assert_eq!(
-        checker.check(valid_exec.clone(), &cwd, &[root_path.clone()], &[]),
+        checker.check(
+            valid_exec.clone(),
+            &cwd,
+            std::slice::from_ref(&root_path),
+            &[]
+        ),
         Err(WriteablePathNotInWriteableFolders {
             file: dest_path.clone(),
             folders: vec![]

@@ -226,8 +231,8 @@ system_path=[{fake_cp:?}]
         checker.check(
             valid_exec.clone(),
             &cwd,
-            &[root_path.clone()],
-            &[root_path.clone()]
+            std::slice::from_ref(&root_path),
+            std::slice::from_ref(&root_path)
         ),
         Ok(cp.clone()),
     );

@@ -246,8 +251,8 @@ system_path=[{fake_cp:?}]
         checker.check(
             valid_exec_call_folders_as_args,
             &cwd,
-            &[root_path.clone()],
-            &[root_path.clone()]
+            std::slice::from_ref(&root_path),
+            std::slice::from_ref(&root_path)
         ),
         Ok(cp.clone()),
     );

@@ -269,8 +274,8 @@ system_path=[{fake_cp:?}]
         checker.check(
             exec_with_parent_of_readable_folder,
             &cwd,
-            &[root_path.clone()],
-            &[dest_path.clone()]
+            std::slice::from_ref(&root_path),
+            std::slice::from_ref(&dest_path)
         ),
         Err(ReadablePathNotInReadableFolders {
             file: root_path.parent().unwrap().to_path_buf(),
@@ -56,8 +56,9 @@ impl Policy {
         }

         for arg in args {
-            if let Some(regex) = &self.forbidden_substrings_pattern {
-                if regex.is_match(arg) {
+            if let Some(regex) = &self.forbidden_substrings_pattern
+                && regex.is_match(arg)
+            {
                 return Ok(MatchedExec::Forbidden {
                     cause: Forbidden::Arg {
                         arg: arg.clone(),

@@ -67,7 +68,6 @@ impl Policy {
                 });
             }
         }
-        }

         let mut last_err = Err(Error::NoSpecForProgram {
             program: program.clone(),
@@ -3,13 +3,13 @@ use crate::error::Result;

 pub fn parse_sed_command(sed_command: &str) -> Result<()> {
     // For now, we parse only commands like `122,202p`.
-    if let Some(stripped) = sed_command.strip_suffix("p") {
-        if let Some((first, rest)) = stripped.split_once(",") {
-            if first.parse::<u64>().is_ok() && rest.parse::<u64>().is_ok() {
+    if let Some(stripped) = sed_command.strip_suffix("p")
+        && let Some((first, rest)) = stripped.split_once(",")
+        && first.parse::<u64>().is_ok()
+        && rest.parse::<u64>().is_ok()
+    {
         return Ok(());
     }
-        }
-    }

     Err(Error::SedCommandNotProvablySafe {
         command: sed_command.to_string(),
@@ -228,14 +228,14 @@ pub fn run(
         for &Reverse((score, ref line)) in best_list.binary_heap.iter() {
             if global_heap.len() < limit.get() {
                 global_heap.push(Reverse((score, line.clone())));
-            } else if let Some(min_element) = global_heap.peek() {
-                if score > min_element.0.0 {
+            } else if let Some(min_element) = global_heap.peek()
+                && score > min_element.0.0
+            {
                 global_heap.pop();
                 global_heap.push(Reverse((score, line.clone())));
             }
         }
     }
-    }

     let mut raw_matches: Vec<(u32, String)> = global_heap.into_iter().map(|r| r.0).collect();
     sort_matches(&mut raw_matches);

@@ -320,14 +320,14 @@ impl BestMatchesList {

         if self.binary_heap.len() < self.max_count {
             self.binary_heap.push(Reverse((score, line.to_string())));
-        } else if let Some(min_element) = self.binary_heap.peek() {
-            if score > min_element.0.0 {
+        } else if let Some(min_element) = self.binary_heap.peek()
+            && score > min_element.0.0
+        {
             self.binary_heap.pop();
             self.binary_heap.push(Reverse((score, line.to_string())));
         }
-        }
     }
 }

 struct WorkerCount {
@@ -364,11 +364,11 @@ async fn persist_tokens_async(
     let codex_home = codex_home.to_path_buf();
     tokio::task::spawn_blocking(move || {
         let auth_file = get_auth_file(&codex_home);
-        if let Some(parent) = auth_file.parent() {
-            if !parent.exists() {
+        if let Some(parent) = auth_file.parent()
+            && !parent.exists()
+        {
             std::fs::create_dir_all(parent).map_err(io::Error::other)?;
         }
-        }

         let mut auth = read_or_default(&auth_file);
         if let Some(key) = api_key {
@@ -166,9 +166,8 @@ impl OllamaClient {
                         yield PullEvent::Error(err_msg.to_string());
                         return;
                     }
-                    if let Some(status) = value.get("status").and_then(|s| s.as_str()) {
-                        if status == "success" { yield PullEvent::Success; return; }
-                    }
+                    if let Some(status) = value.get("status").and_then(|s| s.as_str())
+                        && status == "success" { yield PullEvent::Success; return; }
                 }
             }
         }
@@ -48,20 +48,18 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
     }

     // Format with Prettier by passing individual files (no shell globbing)
-    if let Some(prettier_bin) = prettier {
-        if !ts_files.is_empty() {
+    if let Some(prettier_bin) = prettier
+        && !ts_files.is_empty()
+    {
         let status = Command::new(prettier_bin)
             .arg("--write")
             .args(ts_files.iter().map(|p| p.as_os_str()))
             .status()
-            .with_context(|| {
-                format!("Failed to invoke Prettier at {}", prettier_bin.display())
-            })?;
+            .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
         if !status.success() {
             return Err(anyhow!("Prettier failed with status {}", status));
         }
     }
-    }

     Ok(())
 }
codex-rs/rust-toolchain.toml

@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.88.0"
+channel = "1.89.0"
 components = [ "clippy", "rustfmt", "rust-src"]
@@ -33,12 +33,12 @@ impl ApprovalModalView<'_> {

     /// Advance to next request if the current one is finished.
     fn maybe_advance(&mut self) {
-        if self.current.is_complete() {
-            if let Some(req) = self.queue.pop() {
+        if self.current.is_complete()
+            && let Some(req) = self.queue.pop()
+        {
             self.current = UserApprovalWidget::new(req, self.app_event_tx.clone());
         }
-        }
     }
 }

 impl<'a> BottomPaneView<'a> for ApprovalModalView<'a> {
@@ -1575,8 +1575,9 @@ mod tests {
             }
             14 => {
                 // Try inserting inside an existing element (should clamp to boundary)
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
                     let end = start + payload.len();
                     if end - start > 2 {
                         let pos = rng.random_range(start + 1..end - 1);

@@ -1585,11 +1586,11 @@ mod tests {
                     }
                 }
             }
-            }
             15 => {
                 // Replace a range that intersects an element -> whole element should be replaced
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
                     let end = start + payload.len();
                     // Create an intersecting range [start-δ, end-δ2)
                     let mut s = start.saturating_sub(rng.random_range(0..=2));

@@ -1612,11 +1613,11 @@ mod tests {
                     }
                 }
             }
-            }
             16 => {
                 // Try setting the cursor to a position inside an element; it should clamp out
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
                     let end = start + payload.len();
                     if end - start > 2 {
                         let pos = rng.random_range(start + 1..end - 1);

@@ -1624,7 +1625,6 @@ mod tests {
                     }
                 }
             }
-            }
             _ => {
                 // Jump to word boundaries
                 if rng.random_bool(0.5) {
@@ -339,8 +339,9 @@ async fn binary_size_transcript_matches_ideal_fixture() {
                 }
             }
             "app_event" => {
-                if let Some(variant) = v.get("variant").and_then(|s| s.as_str()) {
-                    if variant == "CommitTick" {
+                if let Some(variant) = v.get("variant").and_then(|s| s.as_str())
+                    && variant == "CommitTick"
+                {
                     chat.on_commit_tick();
                     while let Ok(app_ev) = rx.try_recv() {
                         if let AppEvent::InsertHistory(lines) = app_ev {

@@ -354,7 +355,6 @@ async fn binary_size_transcript_matches_ideal_fixture() {
                         }
                     }
                 }
-                }
             _ => {}
         }
     }
@@ -264,12 +264,12 @@ where
     #[allow(clippy::print_stderr)]
     fn drop(&mut self) {
         // Attempt to restore the cursor state
-        if self.hidden_cursor {
-            if let Err(err) = self.show_cursor() {
+        if self.hidden_cursor
+            && let Err(err) = self.show_cursor()
+        {
             eprintln!("Failed to show the cursor: {err}");
         }
-        }
     }
 }

 impl<B> Terminal<B>

@@ -309,7 +309,7 @@ where
     }

     /// Get a Frame object which provides a consistent view into the terminal state for rendering.
-    pub fn get_frame(&mut self) -> Frame {
+    pub fn get_frame(&mut self) -> Frame<'_> {
         let count = self.frame_count;
         Frame {
             cursor_position: None,
@@ -33,11 +33,11 @@ where
         return None;
     }

-    if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from) {
-        if let Ok(rel) = path.strip_prefix(&home_dir) {
+    if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from)
+        && let Ok(rel) = path.strip_prefix(&home_dir)
+    {
         return Some(rel.to_path_buf());
     }
-    }

     None
 }
@@ -94,14 +94,14 @@ impl FileSearchManager {

         // If there is an in-flight search that is definitely obsolete,
         // cancel it now.
-        if let Some(active_search) = &st.active_search {
-            if !query.starts_with(&active_search.query) {
+        if let Some(active_search) = &st.active_search
+            && !query.starts_with(&active_search.query)
+        {
             active_search
                 .cancellation_token
                 .store(true, Ordering::Relaxed);
             st.active_search = None;
         }
-        }

         // Schedule a search to run after debounce.
         if !st.is_search_scheduled {

@@ -187,12 +187,12 @@ impl FileSearchManager {
             {
                 #[expect(clippy::unwrap_used)]
                 let mut st = search_state.lock().unwrap();
-                if let Some(active_search) = &st.active_search {
-                    if Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token) {
+                if let Some(active_search) = &st.active_search
+                    && Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token)
+                {
                     st.active_search = None;
                 }
-                }
             }
         });
     }
 }
@@ -541,8 +541,9 @@ pub(crate) fn new_status_output(

     // 👤 Account (only if ChatGPT tokens exist), shown under the first block
     let auth_file = get_auth_file(&config.codex_home);
-    if let Ok(auth) = try_read_auth_json(&auth_file) {
-        if let Some(tokens) = auth.tokens.clone() {
+    if let Ok(auth) = try_read_auth_json(&auth_file)
+        && let Some(tokens) = auth.tokens.clone()
+    {
         lines.push(Line::from(vec!["👤 ".into(), "Account".bold()]));
         lines.push(Line::from(" • Signed in with ChatGPT"));

@@ -568,7 +569,6 @@ pub(crate) fn new_status_output(

         lines.push(Line::from(""));
     }
-    }

     // 🧠 Model
     lines.push(Line::from(vec!["🧠 ".into(), "Model".bold()]));

@@ -612,11 +612,11 @@ pub(crate) fn new_status_output(
         " • Input: ".into(),
         usage.non_cached_input().to_string().into(),
     ];
-    if let Some(cached) = usage.cached_input_tokens {
-        if cached > 0 {
+    if let Some(cached) = usage.cached_input_tokens
+        && cached > 0
+    {
         input_line_spans.push(format!(" (+ {cached} cached)").into());
     }
-    }
     lines.push(Line::from(input_line_spans));
     // Output: <output>
     lines.push(Line::from(vec![

@@ -688,17 +688,16 @@ pub(crate) fn new_mcp_tools_output(
         ]));
     }

-    if let Some(env) = cfg.env.as_ref() {
-        if !env.is_empty() {
-            let mut env_pairs: Vec<String> =
-                env.iter().map(|(k, v)| format!("{k}={v}")).collect();
+    if let Some(env) = cfg.env.as_ref()
+        && !env.is_empty()
+    {
+        let mut env_pairs: Vec<String> = env.iter().map(|(k, v)| format!("{k}={v}")).collect();
         env_pairs.sort();
         lines.push(Line::from(vec![
             " • Env: ".into(),
             env_pairs.join(" ").into(),
         ]));
     }
-    }

     if names.is_empty() {
         lines.push(Line::from(" • Tools: (none)"));
@@ -123,8 +123,9 @@ impl AuthModeWidget {

         // If the user is already authenticated but the method differs from their
         // preferred auth method, show a brief explanation.
-        if let LoginStatus::AuthMode(current) = self.login_status {
-            if current != self.preferred_auth_method {
+        if let LoginStatus::AuthMode(current) = self.login_status
+            && current != self.preferred_auth_method
+        {
             let to_label = |mode: AuthMode| match mode {
                 AuthMode::ApiKey => "API key",
                 AuthMode::ChatGPT => "ChatGPT",

@@ -137,7 +138,6 @@ impl AuthModeWidget {
             lines.push(Line::from(msg).style(Style::default()));
             lines.push(Line::from(""));
         }
-        }

         let create_mode_item = |idx: usize,
                                 selected_mode: AuthMode,

@@ -222,8 +222,9 @@ impl AuthModeWidget {
         spans.extend(shimmer_spans("Finish signing in via your browser"));
         let mut lines = vec![Line::from(spans), Line::from("")];

-        if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state {
-            if !state.auth_url.is_empty() {
+        if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state
+            && !state.auth_url.is_empty()
+        {
             lines.push(Line::from(" If the link doesn't open automatically, open the following link to authenticate:"));
             lines.push(Line::from(vec![
                 Span::raw(" "),

@@ -231,7 +232,6 @@ impl AuthModeWidget {
             ]));
             lines.push(Line::from(""));
         }
-        }

         lines.push(
             Line::from(" Press Esc to cancel").style(Style::default().add_modifier(Modifier::DIM)),
@@ -96,8 +96,9 @@ impl StreamController {
     /// Begin a stream, flushing previously completed lines from any other
     /// active stream to maintain ordering.
     pub(crate) fn begin(&mut self, kind: StreamKind, sink: &impl HistorySink) {
-        if let Some(current) = self.current_stream {
-            if current != kind {
+        if let Some(current) = self.current_stream
+            && current != kind
+        {
             // Synchronously flush completed lines from previous stream.
             let cfg = self.config.clone();
             let prev_state = self.state_mut(current);

@@ -116,7 +117,6 @@ impl StreamController {
             }
             self.current_stream = None;
         }
-        }

         if self.current_stream != Some(kind) {
             let prev = self.current_stream;
@@ -53,14 +53,14 @@ pub(crate) fn format_json_compact(text: &str) -> Option<String> {
             }
             ' ' | '\t' if !in_string => {
                 // Add a space after : and , but only when not in a string
-                if let Some(&next_ch) = chars.peek() {
-                    if let Some(last_ch) = result.chars().last() {
-                        if (last_ch == ':' || last_ch == ',') && !matches!(next_ch, '}' | ']') {
+                if let Some(&next_ch) = chars.peek()
+                    && let Some(last_ch) = result.chars().last()
+                    && (last_ch == ':' || last_ch == ',')
+                    && !matches!(next_ch, '}' | ']')
+                {
                     result.push(' ');
                 }
-                    }
-                }
             }
             _ => {
                 if escape_next && in_string {
                     escape_next = false;
@@ -112,15 +112,14 @@ fn long_token_wraps() {
     let mut count_a = 0usize;
     for row in 0..6 {
         for col in 0..20 {
-            if let Some(cell) = screen.cell(row, col) {
-                if let Some(ch) = cell.contents().chars().next() {
-                    if ch == 'A' {
+            if let Some(cell) = screen.cell(row, col)
+                && let Some(ch) = cell.contents().chars().next()
+                && ch == 'A'
+            {
                 count_a += 1;
             }
-                }
-            }
         }
     }

     assert_eq!(
         count_a,