From 31d0d7a305305ad557035a2edcab60b6be5018d8 Mon Sep 17 00:00:00 2001
From: Michael Bolin
Date: Thu, 24 Apr 2025 13:31:40 -0700
Subject: [PATCH] feat: initial import of Rust implementation of Codex CLI in
 codex-rs/ (#629)

As stated in `codex-rs/README.md`:

Today, Codex CLI is written in TypeScript and requires Node.js 22+ to run it.
For a number of users, this runtime requirement inhibits adoption: they would
be better served by a standalone executable. As maintainers, we want Codex to
run efficiently in a wide range of environments with minimal overhead. We also
want to take advantage of operating system-specific APIs to provide better
sandboxing, where possible.

To that end, we are moving forward with a Rust implementation of Codex CLI
contained in this folder, which has the following benefits:

- The CLI compiles to small, standalone, platform-specific binaries.
- Can make direct, native calls to
  [seccomp](https://man7.org/linux/man-pages/man2/seccomp.2.html) and
  [landlock](https://man7.org/linux/man-pages/man7/landlock.7.html) in order
  to support sandboxing on Linux.
- No runtime garbage collection, resulting in lower memory consumption and
  better, more predictable performance.

Currently, the Rust implementation is materially behind the TypeScript
implementation in functionality, so continue to use the TypeScript
implementation for the time being. We will publish native executables via
GitHub Releases as soon as we feel the Rust version is usable.
---
 .github/workflows/rust-ci.yml | 87 + codex-rs/.gitignore | 1 + codex-rs/Cargo.lock | 3475 +++++++++++++++++ codex-rs/Cargo.toml | 12 + codex-rs/README.md | 24 + codex-rs/ansi-escape/Cargo.toml | 16 + codex-rs/ansi-escape/README.md | 15 + codex-rs/ansi-escape/src/lib.rs | 39 + codex-rs/apply-patch/Cargo.toml | 21 + codex-rs/apply-patch/src/lib.rs | 1020 +++++ codex-rs/apply-patch/src/parser.rs | 499 +++ codex-rs/apply-patch/src/seek_sequence.rs | 107 + codex-rs/cli/Cargo.toml | 27 + codex-rs/cli/src/main.rs | 112 + codex-rs/cli/src/proto.rs | 94 + codex-rs/cli/src/seatbelt.rs | 17 + codex-rs/core/Cargo.toml | 62 + codex-rs/core/README.md | 10 + codex-rs/core/prompt.md | 98 + codex-rs/core/src/approval_mode_cli_arg.rs | 61 + codex-rs/core/src/client.rs | 374 ++ codex-rs/core/src/codex.rs | 1448 +++++++ codex-rs/core/src/codex_wrapper.rs | 85 + codex-rs/core/src/config.rs | 42 + codex-rs/core/src/error.rs | 103 + codex-rs/core/src/exec.rs | 277 ++ codex-rs/core/src/flags.rs | 30 + codex-rs/core/src/is_safe_command.rs | 332 ++ codex-rs/core/src/lib.rs | 30 + codex-rs/core/src/linux.rs | 320 ++ codex-rs/core/src/models.rs | 175 + codex-rs/core/src/protocol.rs | 275 ++ codex-rs/core/src/safety.rs | 236 ++ .../core/src/seatbelt_readonly_policy.sbpl | 70 + codex-rs/core/src/util.rs | 68 + codex-rs/core/tests/live_agent.rs | 219 ++ codex-rs/core/tests/live_cli.rs | 143 + codex-rs/core/tests/previous_response_id.rs | 156 + codex-rs/core/tests/stream_no_completed.rs | 109 + codex-rs/docs/protocol_v1.md | 172 + codex-rs/exec/Cargo.toml | 26 + codex-rs/exec/src/cli.rs | 21 + codex-rs/exec/src/lib.rs | 208 + codex-rs/exec/src/main.rs | 11 + codex-rs/interactive/Cargo.toml | 24 + codex-rs/interactive/src/cli.rs | 33 + codex-rs/interactive/src/lib.rs | 7 + codex-rs/interactive/src/main.rs | 11 + codex-rs/justfile | 19 + codex-rs/repl/Cargo.toml | 28 + codex-rs/repl/src/cli.rs | 60 + codex-rs/repl/src/lib.rs | 423 ++ codex-rs/repl/src/main.rs | 11 + codex-rs/rustfmt.toml | 4 + codex-rs/tui/Cargo.toml | 37 + codex-rs/tui/src/app.rs | 194 + 
codex-rs/tui/src/app_event.rs | 17 + codex-rs/tui/src/bottom_pane.rs | 303 ++ codex-rs/tui/src/chatwidget.rs | 387 ++ codex-rs/tui/src/cli.rs | 41 + .../tui/src/conversation_history_widget.rs | 379 ++ codex-rs/tui/src/exec_command.rs | 62 + codex-rs/tui/src/git_warning_screen.rs | 122 + codex-rs/tui/src/history_cell.rs | 271 ++ codex-rs/tui/src/lib.rs | 165 + codex-rs/tui/src/log_layer.rs | 94 + codex-rs/tui/src/main.rs | 10 + codex-rs/tui/src/status_indicator_widget.rs | 214 + codex-rs/tui/src/tui.rs | 37 + codex-rs/tui/src/user_approval_widget.rs | 395 ++ codex-rs/tui/tests/status_indicator.rs | 24 + 71 files changed, 14099 insertions(+) create mode 100644 .github/workflows/rust-ci.yml create mode 100644 codex-rs/.gitignore create mode 100644 codex-rs/Cargo.lock create mode 100644 codex-rs/Cargo.toml create mode 100644 codex-rs/README.md create mode 100644 codex-rs/ansi-escape/Cargo.toml create mode 100644 codex-rs/ansi-escape/README.md create mode 100644 codex-rs/ansi-escape/src/lib.rs create mode 100644 codex-rs/apply-patch/Cargo.toml create mode 100644 codex-rs/apply-patch/src/lib.rs create mode 100644 codex-rs/apply-patch/src/parser.rs create mode 100644 codex-rs/apply-patch/src/seek_sequence.rs create mode 100644 codex-rs/cli/Cargo.toml create mode 100644 codex-rs/cli/src/main.rs create mode 100644 codex-rs/cli/src/proto.rs create mode 100644 codex-rs/cli/src/seatbelt.rs create mode 100644 codex-rs/core/Cargo.toml create mode 100644 codex-rs/core/README.md create mode 100644 codex-rs/core/prompt.md create mode 100644 codex-rs/core/src/approval_mode_cli_arg.rs create mode 100644 codex-rs/core/src/client.rs create mode 100644 codex-rs/core/src/codex.rs create mode 100644 codex-rs/core/src/codex_wrapper.rs create mode 100644 codex-rs/core/src/config.rs create mode 100644 codex-rs/core/src/error.rs create mode 100644 codex-rs/core/src/exec.rs create mode 100644 codex-rs/core/src/flags.rs create mode 100644 codex-rs/core/src/is_safe_command.rs create mode 100644 codex-rs/core/src/lib.rs create mode 100644 codex-rs/core/src/linux.rs create mode 100644 codex-rs/core/src/models.rs create mode 100644 codex-rs/core/src/protocol.rs create mode 100644 codex-rs/core/src/safety.rs create mode 100644 codex-rs/core/src/seatbelt_readonly_policy.sbpl create mode 100644 codex-rs/core/src/util.rs create mode 100644 codex-rs/core/tests/live_agent.rs create mode 100644 codex-rs/core/tests/live_cli.rs create mode 100644 codex-rs/core/tests/previous_response_id.rs create mode 100644 codex-rs/core/tests/stream_no_completed.rs create mode 100644 codex-rs/docs/protocol_v1.md create mode 100644 codex-rs/exec/Cargo.toml create mode 100644 codex-rs/exec/src/cli.rs create mode 100644 codex-rs/exec/src/lib.rs create mode 100644 codex-rs/exec/src/main.rs create mode 100644 codex-rs/interactive/Cargo.toml create mode 100644 codex-rs/interactive/src/cli.rs create mode 100644 codex-rs/interactive/src/lib.rs create mode 100644 codex-rs/interactive/src/main.rs create mode 100644 codex-rs/justfile create mode 100644 codex-rs/repl/Cargo.toml create mode 100644 codex-rs/repl/src/cli.rs create mode 100644 codex-rs/repl/src/lib.rs create mode 100644 codex-rs/repl/src/main.rs create mode 100644 codex-rs/rustfmt.toml create mode 100644 codex-rs/tui/Cargo.toml create mode 100644 codex-rs/tui/src/app.rs create mode 100644 codex-rs/tui/src/app_event.rs create mode 100644 codex-rs/tui/src/bottom_pane.rs create mode 100644 codex-rs/tui/src/chatwidget.rs create mode 100644 codex-rs/tui/src/cli.rs create mode 100644 
codex-rs/tui/src/conversation_history_widget.rs create mode 100644 codex-rs/tui/src/exec_command.rs create mode 100644 codex-rs/tui/src/git_warning_screen.rs create mode 100644 codex-rs/tui/src/history_cell.rs create mode 100644 codex-rs/tui/src/lib.rs create mode 100644 codex-rs/tui/src/log_layer.rs create mode 100644 codex-rs/tui/src/main.rs create mode 100644 codex-rs/tui/src/status_indicator_widget.rs create mode 100644 codex-rs/tui/src/tui.rs create mode 100644 codex-rs/tui/src/user_approval_widget.rs create mode 100644 codex-rs/tui/tests/status_indicator.rs diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml new file mode 100644 index 00000000..1867949d --- /dev/null +++ b/.github/workflows/rust-ci.yml @@ -0,0 +1,87 @@ +name: rust-ci +on: + pull_request: { branches: [main] } + push: { branches: [main] } + +# For CI, we build in debug (`--profile dev`) rather than release mode so we +# get signal faster. + +jobs: + macos: + runs-on: macos-14 + timeout-minutes: 30 + defaults: + run: + working-directory: codex-rs + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + targets: aarch64-apple-darwin,x86_64-apple-darwin + + - name: Initialize failure flag + run: echo "FAILED=" >> $GITHUB_ENV + + - name: cargo fmt + run: cargo fmt -- --config imports_granularity=Item --check || echo "FAILED=${FAILED:+$FAILED, }cargo fmt" >> $GITHUB_ENV + + - name: cargo test + run: cargo test || echo "FAILED=${FAILED:+$FAILED, }cargo test" >> $GITHUB_ENV + + - name: cargo clippy + run: cargo clippy --all-features -- -D warnings || echo "FAILED=${FAILED:+$FAILED, }cargo clippy" >> $GITHUB_ENV + + - name: arm64 build + run: cargo build --target aarch64-apple-darwin || echo "FAILED=${FAILED:+$FAILED, }arm64 build" >> $GITHUB_ENV + + - name: x86_64 build + run: cargo build --target x86_64-apple-darwin || echo "FAILED=${FAILED:+$FAILED, }x86_64 build" >> $GITHUB_ENV + + - name: Fail if any step failed + run: | + if [ -n "$FAILED" ]; then + echo -e "See logs above, as the following steps failed:\n$FAILED" + exit 1 + fi + env: + FAILED: ${{ env.FAILED }} + + linux-musl-x86_64: + runs-on: ubuntu-24.04 + timeout-minutes: 30 + defaults: + run: + working-directory: codex-rs + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + targets: x86_64-unknown-linux-musl + - name: Install musl build tools + run: | + sudo apt update + sudo apt install -y musl-tools pkg-config + + - name: Initialize failure flag + run: echo "FAILED=" >> $GITHUB_ENV + + - name: cargo fmt + run: cargo fmt -- --config imports_granularity=Item --check || echo "FAILED=${FAILED:+$FAILED, }cargo fmt" >> $GITHUB_ENV + + - name: cargo test + run: cargo test || echo "FAILED=${FAILED:+$FAILED, }cargo test" >> $GITHUB_ENV + + - name: cargo clippy + run: cargo clippy --all-features -- -D warnings || echo "FAILED=${FAILED:+$FAILED, }cargo clippy" >> $GITHUB_ENV + + - name: x86_64 musl build + run: cargo build --target x86_64-unknown-linux-musl || echo "FAILED=${FAILED:+$FAILED, }x86_64 musl build" >> $GITHUB_ENV + + - name: Fail if any step failed + run: | + if [ -n "$FAILED" ]; then + echo -e "See logs above, as the following steps failed:\n$FAILED" + exit 1 + fi + env: + FAILED: ${{ env.FAILED }} diff --git a/codex-rs/.gitignore b/codex-rs/.gitignore new file mode 100644 index 00000000..b83d2226 --- /dev/null +++ b/codex-rs/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock new file mode 100644 index 00000000..f9f58608 --- 
/dev/null +++ b/codex-rs/Cargo.lock @@ -0,0 +1,3475 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi-to-tui" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67555e1f1ece39d737e28c8a017721287753af3f93225e4a445b29ccb0f5912c" +dependencies = [ + "nom", + "ratatui", + "simdutf8", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +dependencies = [ + "anstyle", + "once_cell", + "windows-sys 0.59.0", +] + +[[package]] +name = "anyhow" +version = "1.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" + +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "assert_cmd" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd389a4b2970a01282ee455294913c0a43724daedcd1a24c3eb0ec1c1320b66" +dependencies = [ + "anstyle", + "bstr", + "doc-comment", + "libc", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + +[[package]] +name = "async-channel" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-trait" +version = "0.1.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "backtrace" +version = "0.3.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "bstr" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" +dependencies = [ + "memchr", + "regex-automata 0.4.9", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + 
+[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "cassowary" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" + +[[package]] +name = "castaway" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0abae9be0aaf9ea96a3b1b8b1b55c602ca751eba1b1500220cea4ecbafe7c0d5" +dependencies = [ + "rustversion", +] + +[[package]] +name = "cc" +version = "1.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "clap" +version = "4.5.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", + "terminal_size", +] + +[[package]] +name = "clap_derive" +version = "4.5.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "codex-ansi-escape" +version = "0.1.0" +dependencies = [ + "ansi-to-tui", + "ratatui", + "tracing", +] + +[[package]] +name = "codex-apply-patch" +version = "0.1.0" +dependencies = [ + "anyhow", + "pretty_assertions", + "regex", + "serde_json", + "similar", + "tempfile", + "thiserror 2.0.12", + "tree-sitter", + "tree-sitter-bash", +] + +[[package]] +name = "codex-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "codex-core", + "codex-exec", + "codex-interactive", + "codex-repl", + "codex-tui", + "serde_json", + "tokio", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "codex-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_cmd", + "async-channel", + "base64 0.21.7", + "bytes", + "clap", + "codex-apply-patch", + "dirs 6.0.0", + "env-flags", + "eventsource-stream", + "expanduser", + "fs-err", + "futures", + "landlock", + "libc", + "mime_guess", + "openssl-sys", + "patch", + "predicates", 
+ "rand", + "reqwest", + "seccompiler", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.12", + "tokio", + "tokio-util", + "toml", + "tracing", + "tree-sitter", + "tree-sitter-bash", + "wiremock", +] + +[[package]] +name = "codex-exec" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "codex-core", + "tokio", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "codex-interactive" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "codex-core", + "tokio", +] + +[[package]] +name = "codex-repl" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "codex-core", + "owo-colors 4.2.0", + "rand", + "tokio", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "codex-tui" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "codex-ansi-escape", + "codex-core", + "color-eyre", + "crossterm", + "ratatui", + "shlex", + "tokio", + "tracing", + "tracing-appender", + "tracing-subscriber", + "tui-input", + "tui-textarea", +] + +[[package]] +name = "color-eyre" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" +dependencies = [ + "backtrace", + "color-spantrace", + "eyre", + "indenter", + "once_cell", + "owo-colors 3.5.0", + "tracing-error", +] + +[[package]] +name = "color-spantrace" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +dependencies = [ + "once_cell", + "owo-colors 3.5.0", + "tracing-core", + "tracing-error", +] + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "compact_str" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "rustversion", + "ryu", + "static_assertions", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crossterm" 
+version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" +dependencies = [ + "bitflags", + "crossterm_winapi", + "mio", + "parking_lot", + "rustix 0.38.44", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "deadpool" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "dirs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" +dependencies = [ + "libc", + "redox_users 0.3.5", + "winapi", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.5.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = 
"doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enumflags2" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" +dependencies = [ + "enumflags2_derive", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "env-flags" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbfd0e7fc632dec5e6c9396a27bc9f9975b4e039720e1fd3e34021d3ce28c415" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "event-listener" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "eventsource-stream" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab" +dependencies = [ + "futures-core", + "nom", + "pin-project-lite", +] + +[[package]] +name = "expanduser" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14e0b79235da57db6b6c2beed9af6e5de867d63a973ae3e91910ddc33ba40bc0" +dependencies = [ + "dirs 1.0.5", + "lazy_static", + "pwd", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "float-cmp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" +dependencies = [ + "num-traits", +] + 
+[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs-err" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f89bda4c2a21204059a977ed3bfe746677dfd137b83c339e702b0ac91d482aa" +dependencies = [ + "autocfg", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", 
+ "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "h2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75249d144030531f8dee69fe9cea04d3edf809a017ae445e2abdff6629e86633" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "libc", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indexmap" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "indoc" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" + +[[package]] +name = "instability" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf9fed6d91cfb734e7476a06bde8300a1b94e217e1b523b6f0cd1a01998c71d" +dependencies = [ + "darling", + "indoc", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "landlock" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18738c5d4c7fae6727a96adb94722ef7ce82f3eafea0a11777e258a93816537e" +dependencies = [ + "enumflags2", + "libc", + "thiserror 1.0.69", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.172" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags", + "libc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom_locate" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3" +dependencies = [ + "bytecount", + "memchr", + "nom", +] + +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "openssl" +version = "0.10.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + 
"libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-src" +version = "300.5.0+3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8ce546f549326b0e6052b649198487d91320875da901e7bd11a06d1ee3f9c2f" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "owo-colors" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" + +[[package]] +name = "owo-colors" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.11", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "patch" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15c07fdcdd8b05bdcf2a25bc195b6c34cbd52762ada9dba88bf81e7686d14e7a" +dependencies = [ + "chrono", + "nom", + "nom_locate", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "predicates" +version = "3.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" +dependencies = [ + "anstyle", + "difflib", + "float-cmp", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" + +[[package]] +name = "predicates-tree" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "pwd" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c71c0c79b9701efe4e1e4b563b2016dd4ee789eb99badcb09d61ac4b92e4a2" +dependencies = [ + "libc", + "thiserror 1.0.69", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + +[[package]] +name = "rand" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.2", +] + +[[package]] +name = "ratatui" +version = "0.29.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" +dependencies = [ + "bitflags", + "cassowary", + "compact_str", + "crossterm", + "indoc", + "instability", + "itertools", + "lru", + "paste", + "strum", + "unicode-segmentation", + "unicode-truncate", + "unicode-width 0.2.0", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_syscall" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom 0.1.16", + "redox_syscall 0.1.57", + "rust-argon2", +] + +[[package]] +name = "redox_users" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.12", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.12.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tokio-util", + "tower", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64 0.13.1", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustix" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.9.4", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls" +version = "0.23.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" + +[[package]] +name = "rustls-webpki" +version = "0.103.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "seccompiler" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4ae55de56877481d112a559bbc12667635fdaf5e005712fd4e2b2fa50ffc884" +dependencies = [ + "libc", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +dependencies = [ + "indexmap", + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" +dependencies = [ + "libc", + "mio", + "signal-hook", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" +dependencies = [ + "libc", +] + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + 
"autocfg", +] + +[[package]] +name = "smallvec" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" + +[[package]] +name = "socket2" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +dependencies = [ + 
"fastrand", + "getrandom 0.3.2", + "once_cell", + "rustix 1.0.5", + "windows-sys 0.59.0", +] + +[[package]] +name = "terminal_size" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +dependencies = [ + "rustix 1.0.5", + "windows-sys 0.59.0", +] + +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.44.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.69", + "time", + "tracing-subscriber", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-error" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db" +dependencies = [ + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tree-sitter" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ac5ea5e7f2f1700842ec071401010b9c59bf735295f6e9fa079c3dc035b167" +dependencies = [ + "cc", + "regex", + "regex-syntax 0.8.5", + "serde_json", + "streaming-iterator", + "tree-sitter-language", +] + +[[package]] +name = "tree-sitter-bash" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "329a4d48623ac337d42b1df84e81a1c9dbb2946907c102ca72db158c1964a52e" +dependencies = [ + "cc", + "tree-sitter-language", +] + +[[package]] +name = "tree-sitter-language" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4013970217383f67b18aef68f6fb2e8d409bc5755227092d32efb0422ba24b8" + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tui-input" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d1733c47f1a217b7deff18730ff7ca4ecafc5771368f715ab072d679a36114" +dependencies = [ + "ratatui", + "unicode-width 0.2.0", +] + +[[package]] +name = "tui-textarea" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a5318dd619ed73c52a9417ad19046724effc1287fb75cdcc4eca1d6ac1acbae" +dependencies = [ + "crossterm", + "ratatui", + "unicode-width 0.2.0", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-truncate" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" +dependencies = [ + "itertools", + "unicode-segmentation", + "unicode-width 0.1.14", +] + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + 
"once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.61.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings 0.4.0", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" +dependencies = [ + "windows-result", + "windows-strings 0.3.1", + "windows-targets 0.53.0", +] + 
+[[package]] +name = "windows-result" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63d3fcd9bba44b03821e7d699eeee959f3126dcc4aa8e4ae18ec617c2a5cea10" +dependencies = [ + "memchr", +] + +[[package]] +name = "wiremock" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "101681b74cd87b5899e87bcf5a64e83334dd313fcd3053ea72e6dba18928e301" +dependencies = [ + "assert-json-diff", + "async-trait", + "base64 0.22.1", + "deadpool", + "futures", + "http", + "http-body-util", + "hyper", + "hyper-util", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", + "url", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = 
"yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/codex-rs/Cargo.toml b/codex-rs/Cargo.toml new file mode 100644 index 00000000..f3f66eb2 --- /dev/null +++ b/codex-rs/Cargo.toml @@ -0,0 +1,12 @@ +[workspace] +resolver = "2" +members = [ + "ansi-escape", + "apply-patch", + "cli", + "core", + "exec", + "interactive", + "repl", + "tui", +] diff --git a/codex-rs/README.md b/codex-rs/README.md new file mode 100644 index 00000000..309ef033 --- /dev/null +++ b/codex-rs/README.md @@ -0,0 +1,24 @@ +# codex-rs + +April 24, 2025 + +Today, Codex CLI is written in TypeScript and requires Node.js 22+ to run it. For a number of users, this runtime requirement inhibits adoption: they would be better served by a standalone executable. As maintainers, we want Codex to run efficiently in a wide range of environments with minimal overhead. We also want to take advantage of operating system-specific APIs to provide better sandboxing, where possible. + +To that end, we are moving forward with a Rust implementation of Codex CLI contained in this folder, which has the following benefits: + +- The CLI compiles to small, standalone, platform-specific binaries. 
+- Can make direct, native calls to [seccomp](https://man7.org/linux/man-pages/man2/seccomp.2.html) and [landlock](https://man7.org/linux/man-pages/man7/landlock.7.html) in order to support sandboxing on Linux. +- No runtime garbage collection, resulting in lower memory consumption and better, more predictable performance. + +Currently, the Rust implementation is materially behind the TypeScript implementation in functionality, so continue to use the TypeScript implementation for the time being. We will publish native executables via GitHub Releases as soon as we feel the Rust version is usable. + +## Code Organization + +This folder is the root of a Cargo workspace. It contains quite a bit of experimental code, but here are the key crates: + +- [`core/`](./core) contains the business logic for Codex. Ultimately, we hope this will become a library crate that is generally useful for building other Rust/native applications that use Codex. +- [`interactive/`](./interactive) CLI with a UX comparable to the TypeScript Codex CLI. +- [`exec/`](./exec) "headless" CLI for use in automation. +- [`tui/`](./tui) CLI that launches a fullscreen TUI built with [Ratatui](https://ratatui.rs/). +- [`repl/`](./repl) CLI that launches a lightweight REPL similar to the Python or Node.js REPL. +- [`cli/`](./cli) CLI multitool that provides the aforementioned CLIs via subcommands. diff --git a/codex-rs/ansi-escape/Cargo.toml b/codex-rs/ansi-escape/Cargo.toml new file mode 100644 index 00000000..f1832eda --- /dev/null +++ b/codex-rs/ansi-escape/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "codex-ansi-escape" +version = "0.1.0" +edition = "2021" + +[lib] +name = "codex_ansi_escape" +path = "src/lib.rs" + +[dependencies] +ansi-to-tui = "7.0.0" +ratatui = { version = "0.29.0", features = [ + "unstable-widget-ref", + "unstable-rendered-line-info", +] } +tracing = { version = "0.1.41", features = ["log"] } diff --git a/codex-rs/ansi-escape/README.md b/codex-rs/ansi-escape/README.md new file mode 100644 index 00000000..19f239cb --- /dev/null +++ b/codex-rs/ansi-escape/README.md @@ -0,0 +1,15 @@ +# oai-codex-ansi-escape + +Small helper functions that wrap functionality from +the [`ansi-to-tui`](https://crates.io/crates/ansi-to-tui) crate: + +```rust +pub fn ansi_escape_line(s: &str) -> Line<'static> +pub fn ansi_escape<'a>(s: &'a str) -> Text<'a> +``` + +Advantages: + +- `ansi_to_tui::IntoText` is not in scope for the entire TUI crate +- we `panic!()` and log if `IntoText` returns an `Err` so that + the caller does not have to deal with it diff --git a/codex-rs/ansi-escape/src/lib.rs b/codex-rs/ansi-escape/src/lib.rs new file mode 100644 index 00000000..3daaf46e --- /dev/null +++ b/codex-rs/ansi-escape/src/lib.rs @@ -0,0 +1,39 @@ +use ansi_to_tui::Error; +use ansi_to_tui::IntoText; +use ratatui::text::Line; +use ratatui::text::Text; + +/// This function should be used when the contents of `s` are expected to match +/// a single line. If multiple lines are found, a warning is logged and only the +/// first line is returned. +pub fn ansi_escape_line(s: &str) -> Line<'static> { + let text = ansi_escape(s); + match text.lines.as_slice() { + [] => Line::from(""), + [only] => only.clone(), + [first, rest @ ..] => { + tracing::warn!("ansi_escape_line: expected a single line, got {first:?} and {rest:?}"); + first.clone() + } + } +} + +pub fn ansi_escape(s: &str) -> Text<'static> { + // to_text() claims to be faster, but introduces complex lifetime issues + // such that it's not worth it.
+ match s.into_text() { + Ok(text) => text, + Err(err) => match err { + Error::NomError(message) => { + tracing::error!( + "ansi_to_tui NomError docs claim should never happen when parsing `{s}`: {message}" + ); + panic!(); + } + Error::Utf8Error(utf8error) => { + tracing::error!("Utf8Error: {utf8error}"); + panic!(); + } + }, + } +} diff --git a/codex-rs/apply-patch/Cargo.toml b/codex-rs/apply-patch/Cargo.toml new file mode 100644 index 00000000..ab24ee62 --- /dev/null +++ b/codex-rs/apply-patch/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "codex-apply-patch" +version = "0.1.0" +edition = "2021" + +[lib] +name = "codex_apply_patch" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +regex = "1.11.1" +serde_json = "1.0.110" +similar = "2.7.0" +thiserror = "2.0.12" +tree-sitter = "0.25.3" +tree-sitter-bash = "0.23.3" + +[dev-dependencies] +pretty_assertions = "1.4.1" +tempfile = "3.13.0" diff --git a/codex-rs/apply-patch/src/lib.rs b/codex-rs/apply-patch/src/lib.rs new file mode 100644 index 00000000..05ea7496 --- /dev/null +++ b/codex-rs/apply-patch/src/lib.rs @@ -0,0 +1,1020 @@ +mod parser; +mod seek_sequence; + +use std::collections::HashMap; +use std::path::Path; +use std::path::PathBuf; + +use anyhow::Context; +use anyhow::Error; +use anyhow::Result; +pub use parser::parse_patch; +pub use parser::Hunk; +pub use parser::ParseError; +use parser::ParseError::*; +use parser::UpdateFileChunk; +use similar::TextDiff; +use thiserror::Error; +use tree_sitter::Parser; +use tree_sitter_bash::LANGUAGE as BASH; + +#[derive(Debug, Error)] +pub enum ApplyPatchError { + #[error(transparent)] + ParseError(#[from] ParseError), + #[error(transparent)] + IoError(#[from] IoError), + /// Error that occurs while computing replacements when applying patch chunks + #[error("{0}")] + ComputeReplacements(String), +} + +impl From<std::io::Error> for ApplyPatchError { + fn from(err: std::io::Error) -> Self { + ApplyPatchError::IoError(IoError { + context: "I/O error".to_string(), + source: err, + }) + } +} + +#[derive(Debug, Error)] +#[error("{context}: {source}")] +pub struct IoError { + context: String, + #[source] + source: std::io::Error, +} + +#[derive(Debug)] +pub enum MaybeApplyPatch { + Body(Vec<Hunk>), + ShellParseError(Error), + PatchParseError(ParseError), + NotApplyPatch, +} + +pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch { + match argv { + [cmd, body] if cmd == "apply_patch" => match parse_patch(body) { + Ok(hunks) => MaybeApplyPatch::Body(hunks), + Err(e) => MaybeApplyPatch::PatchParseError(e), + }, + [bash, flag, script] + if bash == "bash" + && flag == "-lc" + && script.trim_start().starts_with("apply_patch") => + { + match extract_heredoc_body_from_apply_patch_command(script) { + Ok(body) => match parse_patch(&body) { + Ok(hunks) => MaybeApplyPatch::Body(hunks), + Err(e) => MaybeApplyPatch::PatchParseError(e), + }, + Err(e) => MaybeApplyPatch::ShellParseError(e), + } + } + _ => MaybeApplyPatch::NotApplyPatch, + } +} + +#[derive(Debug)] +pub enum ApplyPatchFileChange { + Add { + content: String, + }, + Delete, + Update { + unified_diff: String, + move_path: Option<PathBuf>, + }, +} + +#[derive(Debug)] +pub enum MaybeApplyPatchVerified { + /// `argv` corresponded to an `apply_patch` invocation, and these are the + /// resulting proposed file changes. + Body(HashMap<PathBuf, ApplyPatchFileChange>), + /// `argv` could not be parsed to determine whether it corresponds to an + /// `apply_patch` invocation.
+ ShellParseError(Error), + /// `argv` corresponded to an `apply_patch` invocation, but it could not + /// be fulfilled due to the specified error. + CorrectnessError(ApplyPatchError), + /// `argv` decidedly did not correspond to an `apply_patch` invocation. + NotApplyPatch, +} + +pub fn maybe_parse_apply_patch_verified(argv: &[String]) -> MaybeApplyPatchVerified { + match maybe_parse_apply_patch(argv) { + MaybeApplyPatch::Body(hunks) => { + let mut changes = HashMap::new(); + for hunk in hunks { + match hunk { + Hunk::AddFile { path, contents } => { + changes.insert( + path, + ApplyPatchFileChange::Add { + content: contents.clone(), + }, + ); + } + Hunk::DeleteFile { path } => { + changes.insert(path, ApplyPatchFileChange::Delete); + } + Hunk::UpdateFile { + path, + move_path, + chunks, + } => { + let unified_diff = match unified_diff_from_chunks(&path, &chunks) { + Ok(diff) => diff, + Err(e) => { + return MaybeApplyPatchVerified::CorrectnessError(e); + } + }; + changes.insert( + path.clone(), + ApplyPatchFileChange::Update { + unified_diff, + move_path, + }, + ); + } + } + } + MaybeApplyPatchVerified::Body(changes) + } + MaybeApplyPatch::ShellParseError(e) => MaybeApplyPatchVerified::ShellParseError(e), + MaybeApplyPatch::PatchParseError(e) => MaybeApplyPatchVerified::CorrectnessError(e.into()), + MaybeApplyPatch::NotApplyPatch => MaybeApplyPatchVerified::NotApplyPatch, + } +} + +/// Optimistically attempts to extract a heredoc_body object from a string bash command like: +/// +/// ```bash +/// bash -lc 'apply_patch <<EOF ... EOF' +/// ``` +fn extract_heredoc_body_from_apply_patch_command(src: &str) -> anyhow::Result<String> { + if !src.trim_start().starts_with("apply_patch") { + anyhow::bail!("expected command to start with 'apply_patch'"); + } + + let lang = BASH.into(); + let mut parser = Parser::new(); + parser.set_language(&lang).expect("load bash grammar"); + let tree = parser + .parse(src, None) + .ok_or_else(|| anyhow::anyhow!("failed to parse patch into AST"))?; + + let bytes = src.as_bytes(); + let mut c = tree.root_node().walk(); + + loop { + let node = c.node(); + if node.kind() == "heredoc_body" { + let text = node.utf8_text(bytes).unwrap(); + return Ok(text.trim_end_matches('\n').to_owned()); + } + + if c.goto_first_child() { + continue; + } + while !c.goto_next_sibling() { + if !c.goto_parent() { + anyhow::bail!("expected to find heredoc_body in patch candidate"); + } + } + } +} + +/// Applies the patch and prints the result to stdout/stderr. +pub fn apply_patch( + patch: &str, + stdout: &mut impl std::io::Write, + stderr: &mut impl std::io::Write, +) -> Result<(), ApplyPatchError> { + let hunks = match parse_patch(patch) { + Ok(hunks) => hunks, + Err(e) => { + match &e { + InvalidPatchError(message) => { + writeln!(stderr, "Invalid patch: {message}").map_err(ApplyPatchError::from)?; + } + InvalidHunkError { + message, + line_number, + } => { + writeln!( + stderr, + "Invalid patch hunk on line {line_number}: {message}" + ) + .map_err(ApplyPatchError::from)?; + } + } + return Err(ApplyPatchError::ParseError(e)); + } + }; + + apply_hunks(&hunks, stdout, stderr)?; + + Ok(()) +} + +/// Applies hunks and continues to update stdout/stderr +pub fn apply_hunks( + hunks: &[Hunk], + stdout: &mut impl std::io::Write, + stderr: &mut impl std::io::Write, +) -> Result<(), ApplyPatchError> { + let _existing_paths: Vec<&Path> = hunks + .iter() + .filter_map(|hunk| match hunk { + Hunk::AddFile { .. } => { + // The file is being added, so it doesn't exist yet. + None + } + Hunk::DeleteFile { path } => Some(path.as_path()), + Hunk::UpdateFile { + path, move_path, ..
+ } => match move_path { + Some(move_path) => { + if std::fs::metadata(move_path) + .map(|m| m.is_file()) + .unwrap_or(false) + { + Some(move_path.as_path()) + } else { + None + } + } + None => Some(path.as_path()), + }, + }) + .collect::<Vec<&Path>>(); + + // Delegate to a helper that applies each hunk to the filesystem. + match apply_hunks_to_files(hunks) { + Ok(affected) => { + print_summary(&affected, stdout).map_err(ApplyPatchError::from)?; + } + Err(err) => { + writeln!(stderr, "{err:?}").map_err(ApplyPatchError::from)?; + } + } + + Ok(()) +} + +/// Tracks file paths affected by applying a patch. +pub struct AffectedPaths { + pub added: Vec<PathBuf>, + pub modified: Vec<PathBuf>, + pub deleted: Vec<PathBuf>, +} + +/// Apply the hunks to the filesystem, returning which files were added, modified, or deleted. +/// Returns an error if the patch could not be applied. +fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result<AffectedPaths> { + if hunks.is_empty() { + anyhow::bail!("No files were modified."); + } + + let mut added: Vec<PathBuf> = Vec::new(); + let mut modified: Vec<PathBuf> = Vec::new(); + let mut deleted: Vec<PathBuf> = Vec::new(); + for hunk in hunks { + match hunk { + Hunk::AddFile { path, contents } => { + if let Some(parent) = path.parent() { + if !parent.as_os_str().is_empty() { + std::fs::create_dir_all(parent).with_context(|| { + format!("Failed to create parent directories for {}", path.display()) + })?; + } + } + std::fs::write(path, contents) + .with_context(|| format!("Failed to write file {}", path.display()))?; + added.push(path.clone()); + } + Hunk::DeleteFile { path } => { + std::fs::remove_file(path) + .with_context(|| format!("Failed to delete file {}", path.display()))?; + deleted.push(path.clone()); + } + Hunk::UpdateFile { + path, + move_path, + chunks, + } => { + let AppliedPatch { new_contents, .. } = + derive_new_contents_from_chunks(path, chunks)?; + if let Some(dest) = move_path { + if let Some(parent) = dest.parent() { + if !parent.as_os_str().is_empty() { + std::fs::create_dir_all(parent).with_context(|| { + format!( + "Failed to create parent directories for {}", + dest.display() + ) + })?; + } + } + std::fs::write(dest, new_contents) + .with_context(|| format!("Failed to write file {}", dest.display()))?; + std::fs::remove_file(path) + .with_context(|| format!("Failed to remove original {}", path.display()))?; + modified.push(dest.clone()); + } else { + std::fs::write(path, new_contents) + .with_context(|| format!("Failed to write file {}", path.display()))?; + modified.push(path.clone()); + } + } + } + } + Ok(AffectedPaths { + added, + modified, + deleted, + }) +} + +struct AppliedPatch { + original_contents: String, + new_contents: String, +} + +/// Return *only* the new file contents (joined into a single `String`) after +/// applying the chunks to the file at `path`. +fn derive_new_contents_from_chunks( + path: &Path, + chunks: &[UpdateFileChunk], +) -> std::result::Result<AppliedPatch, ApplyPatchError> { + let original_contents = match std::fs::read_to_string(path) { + Ok(contents) => contents, + Err(err) => { + return Err(ApplyPatchError::IoError(IoError { + context: format!("Failed to read file to update {}", path.display()), + source: err, + })) + } + }; + + let mut original_lines: Vec<String> = original_contents + .split('\n') + .map(|s| s.to_string()) + .collect(); + + // Drop the trailing empty element that results from the final newline so + // that line counts match the behaviour of standard `diff`.
+ if original_lines.last().is_some_and(|s| s.is_empty()) { + original_lines.pop(); + } + + let replacements = compute_replacements(&original_lines, path, chunks)?; + let mut new_lines = apply_replacements(original_lines, &replacements); + if !new_lines.last().is_some_and(|s| s.is_empty()) { + new_lines.push(String::new()); + } + let new_contents = new_lines.join("\n"); + Ok(AppliedPatch { + original_contents, + new_contents, + }) +} + +/// Compute a list of replacements needed to transform `original_lines` into the +/// new lines, given the patch `chunks`. Each replacement is returned as +/// `(start_index, old_len, new_lines)`. +fn compute_replacements( + original_lines: &[String], + path: &Path, + chunks: &[UpdateFileChunk], +) -> std::result::Result<Vec<(usize, usize, Vec<String>)>, ApplyPatchError> { + let mut replacements: Vec<(usize, usize, Vec<String>)> = Vec::new(); + let mut line_index: usize = 0; + + for chunk in chunks { + // If a chunk has a `change_context`, we use seek_sequence to find it, then + // adjust our `line_index` to continue from there. + if let Some(ctx_line) = &chunk.change_context { + if let Some(idx) = + seek_sequence::seek_sequence(original_lines, &[ctx_line.clone()], line_index, false) + { + line_index = idx + 1; + } else { + return Err(ApplyPatchError::ComputeReplacements(format!( + "Failed to find context '{}' in {}", + ctx_line, + path.display() + ))); + } + } + + if chunk.old_lines.is_empty() { + // Pure addition (no old lines). We'll add them at the end or just + // before the final empty line if one exists. + let insertion_idx = if original_lines.last().is_some_and(|s| s.is_empty()) { + original_lines.len() - 1 + } else { + original_lines.len() + }; + replacements.push((insertion_idx, 0, chunk.new_lines.clone())); + continue; + } + + // Otherwise, try to match the existing lines in the file with the old lines + // from the chunk. If found, schedule that region for replacement. + // Attempt to locate the `old_lines` verbatim within the file. In many + // real‑world diffs the last element of `old_lines` is an *empty* string + // representing the terminating newline of the region being replaced. + // This sentinel is not present in `original_lines` because we strip the + // trailing empty slice emitted by `split('\n')`. If a direct search + // fails and the pattern ends with an empty string, retry without that + // final element so that modifications touching the end‑of‑file can be + // located reliably. + + let mut pattern: &[String] = &chunk.old_lines; + let mut found = + seek_sequence::seek_sequence(original_lines, pattern, line_index, chunk.is_end_of_file); + + let mut new_slice: &[String] = &chunk.new_lines; + + if found.is_none() && pattern.last().is_some_and(|s| s.is_empty()) { + // Retry without the trailing empty line which represents the final + // newline in the file.
+ pattern = &pattern[..pattern.len() - 1]; + if new_slice.last().is_some_and(|s| s.is_empty()) { + new_slice = &new_slice[..new_slice.len() - 1]; + } + + found = seek_sequence::seek_sequence( + original_lines, + pattern, + line_index, + chunk.is_end_of_file, + ); + } + + if let Some(start_idx) = found { + replacements.push((start_idx, pattern.len(), new_slice.to_vec())); + line_index = start_idx + pattern.len(); + } else { + return Err(ApplyPatchError::ComputeReplacements(format!( + "Failed to find expected lines {:?} in {}", + chunk.old_lines, + path.display() + ))); + } + } + + Ok(replacements) +} + +/// Apply the `(start_index, old_len, new_lines)` replacements to `original_lines`, +/// returning the modified file contents as a vector of lines. +fn apply_replacements( + mut lines: Vec, + replacements: &[(usize, usize, Vec)], +) -> Vec { + // We must apply replacements in descending order so that earlier replacements + // don't shift the positions of later ones. + for (start_idx, old_len, new_segment) in replacements.iter().rev() { + let start_idx = *start_idx; + let old_len = *old_len; + + // Remove old lines. + for _ in 0..old_len { + if start_idx < lines.len() { + lines.remove(start_idx); + } + } + + // Insert new lines. + for (offset, new_line) in new_segment.iter().enumerate() { + lines.insert(start_idx + offset, new_line.clone()); + } + } + + lines +} + +pub fn unified_diff_from_chunks( + path: &Path, + chunks: &[UpdateFileChunk], +) -> std::result::Result { + unified_diff_from_chunks_with_context(path, chunks, 1) +} + +pub fn unified_diff_from_chunks_with_context( + path: &Path, + chunks: &[UpdateFileChunk], + context: usize, +) -> std::result::Result { + let AppliedPatch { + original_contents, + new_contents, + } = derive_new_contents_from_chunks(path, chunks)?; + let text_diff = TextDiff::from_lines(&original_contents, &new_contents); + Ok(text_diff.unified_diff().context_radius(context).to_string()) +} + +/// Print the summary of changes in git-style format. +/// Write a summary of changes to the given writer. +pub fn print_summary( + affected: &AffectedPaths, + out: &mut impl std::io::Write, +) -> std::io::Result<()> { + writeln!(out, "Success. Updated the following files:")?; + for path in &affected.added { + writeln!(out, "A {}", path.display())?; + } + for path in &affected.modified { + writeln!(out, "M {}", path.display())?; + } + for path in &affected.deleted { + writeln!(out, "D {}", path.display())?; + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + use std::fs; + use tempfile::tempdir; + + /// Helper to construct a patch with the given body. 
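+ /// e.g. `wrap_patch("*** Add File: foo\n+hi")` wraps the body in the
+ /// `*** Begin Patch` / `*** End Patch` envelope.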
+ fn wrap_patch(body: &str) -> String { + format!("*** Begin Patch\n{}\n*** End Patch", body) + } + + fn strs_to_strings(strs: &[&str]) -> Vec { + strs.iter().map(|s| s.to_string()).collect() + } + + #[test] + fn test_literal() { + let args = strs_to_strings(&[ + "apply_patch", + r#"*** Begin Patch +*** Add File: foo ++hi +*** End Patch +"#, + ]); + + match maybe_parse_apply_patch(&args) { + MaybeApplyPatch::Body(hunks) => { + assert_eq!( + hunks, + vec![Hunk::AddFile { + path: PathBuf::from("foo"), + contents: "hi\n".to_string() + }] + ); + } + result => panic!("expected MaybeApplyPatch::Body got {:?}", result), + } + } + + #[test] + fn test_heredoc() { + let args = strs_to_strings(&[ + "bash", + "-lc", + r#"apply_patch <<'PATCH' +*** Begin Patch +*** Add File: foo ++hi +*** End Patch +PATCH"#, + ]); + + match maybe_parse_apply_patch(&args) { + MaybeApplyPatch::Body(hunks) => { + assert_eq!( + hunks, + vec![Hunk::AddFile { + path: PathBuf::from("foo"), + contents: "hi\n".to_string() + }] + ); + } + result => panic!("expected MaybeApplyPatch::Body got {:?}", result), + } + } + + #[test] + fn test_add_file_hunk_creates_file_with_contents() { + let dir = tempdir().unwrap(); + let path = dir.path().join("add.txt"); + let patch = wrap_patch(&format!( + r#"*** Add File: {} ++ab ++cd"#, + path.display() + )); + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + // Verify expected stdout and stderr outputs. + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + let expected_out = format!( + "Success. Updated the following files:\nA {}\n", + path.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + let contents = fs::read_to_string(path).unwrap(); + assert_eq!(contents, "ab\ncd\n"); + } + + #[test] + fn test_delete_file_hunk_removes_file() { + let dir = tempdir().unwrap(); + let path = dir.path().join("del.txt"); + fs::write(&path, "x").unwrap(); + let patch = wrap_patch(&format!("*** Delete File: {}", path.display())); + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + let expected_out = format!( + "Success. Updated the following files:\nD {}\n", + path.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + assert!(!path.exists()); + } + + #[test] + fn test_update_file_hunk_modifies_content() { + let dir = tempdir().unwrap(); + let path = dir.path().join("update.txt"); + fs::write(&path, "foo\nbar\n").unwrap(); + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ + foo +-bar ++baz"#, + path.display() + )); + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + // Validate modified file contents and expected stdout/stderr. + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + let expected_out = format!( + "Success. 
Updated the following files:\nM {}\n", + path.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + let contents = fs::read_to_string(&path).unwrap(); + assert_eq!(contents, "foo\nbaz\n"); + } + + #[test] + fn test_update_file_hunk_can_move_file() { + let dir = tempdir().unwrap(); + let src = dir.path().join("src.txt"); + let dest = dir.path().join("dst.txt"); + fs::write(&src, "line\n").unwrap(); + let patch = wrap_patch(&format!( + r#"*** Update File: {} +*** Move to: {} +@@ +-line ++line2"#, + src.display(), + dest.display() + )); + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + // Validate move semantics and expected stdout/stderr. + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + let expected_out = format!( + "Success. Updated the following files:\nM {}\n", + dest.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + assert!(!src.exists()); + let contents = fs::read_to_string(&dest).unwrap(); + assert_eq!(contents, "line2\n"); + } + + /// Verify that a single `Update File` hunk with multiple change chunks can update different + /// parts of a file and that the file is listed only once in the summary. + #[test] + fn test_multiple_update_chunks_apply_to_single_file() { + // Start with a file containing four lines. + let dir = tempdir().unwrap(); + let path = dir.path().join("multi.txt"); + fs::write(&path, "foo\nbar\nbaz\nqux\n").unwrap(); + // Construct an update patch with two separate change chunks. + // The first chunk uses the line `foo` as context and transforms `bar` into `BAR`. + // The second chunk uses `baz` as context and transforms `qux` into `QUX`. + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ + foo +-bar ++BAR +@@ + baz +-qux ++QUX"#, + path.display() + )); + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + let expected_out = format!( + "Success. Updated the following files:\nM {}\n", + path.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + let contents = fs::read_to_string(&path).unwrap(); + assert_eq!(contents, "foo\nBAR\nbaz\nQUX\n"); + } + + /// A more involved `Update File` hunk that exercises additions, deletions and + /// replacements in separate chunks that appear in non‑adjacent parts of the + /// file. Verifies that all edits are applied and that the summary lists the + /// file only once. + #[test] + fn test_update_file_hunk_interleaved_changes() { + let dir = tempdir().unwrap(); + let path = dir.path().join("interleaved.txt"); + + // Original file: six numbered lines. + fs::write(&path, "a\nb\nc\nd\ne\nf\n").unwrap(); + + // Patch performs: + // • Replace `b` → `B` + // • Replace `e` → `E` (using surrounding context) + // • Append new line `g` at the end‑of‑file + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ + a +-b ++B +@@ + c + d +-e ++E +@@ + f ++g +*** End of File"#, + path.display() + )); + + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + + let stdout_str = String::from_utf8(stdout).unwrap(); + let stderr_str = String::from_utf8(stderr).unwrap(); + + let expected_out = format!( + "Success. 
Updated the following files:\nM {}\n", + path.display() + ); + assert_eq!(stdout_str, expected_out); + assert_eq!(stderr_str, ""); + + let contents = fs::read_to_string(&path).unwrap(); + assert_eq!(contents, "a\nB\nc\nd\nE\nf\ng\n"); + } + + #[test] + fn test_unified_diff() { + // Start with a file containing four lines. + let dir = tempdir().unwrap(); + let path = dir.path().join("multi.txt"); + fs::write(&path, "foo\nbar\nbaz\nqux\n").unwrap(); + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ + foo +-bar ++BAR +@@ + baz +-qux ++QUX"#, + path.display() + )); + let patch = parse_patch(&patch).unwrap(); + + let update_file_chunks = match patch.as_slice() { + [Hunk::UpdateFile { chunks, .. }] => chunks, + _ => panic!("Expected a single UpdateFile hunk"), + }; + let diff = unified_diff_from_chunks(&path, update_file_chunks).unwrap(); + let expected_diff = r#"@@ -1,4 +1,4 @@ + foo +-bar ++BAR + baz +-qux ++QUX +"#; + assert_eq!(expected_diff, diff); + } + + #[test] + fn test_unified_diff_first_line_replacement() { + // Replace the very first line of the file. + let dir = tempdir().unwrap(); + let path = dir.path().join("first.txt"); + fs::write(&path, "foo\nbar\nbaz\n").unwrap(); + + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ +-foo ++FOO + bar +"#, + path.display() + )); + + let patch = parse_patch(&patch).unwrap(); + let chunks = match patch.as_slice() { + [Hunk::UpdateFile { chunks, .. }] => chunks, + _ => panic!("Expected a single UpdateFile hunk"), + }; + + let diff = unified_diff_from_chunks(&path, chunks).unwrap(); + let expected_diff = r#"@@ -1,2 +1,2 @@ +-foo ++FOO + bar +"#; + assert_eq!(expected_diff, diff); + } + + #[test] + fn test_unified_diff_last_line_replacement() { + // Replace the very last line of the file. + let dir = tempdir().unwrap(); + let path = dir.path().join("last.txt"); + fs::write(&path, "foo\nbar\nbaz\n").unwrap(); + + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ + foo + bar +-baz ++BAZ +"#, + path.display() + )); + + let patch = parse_patch(&patch).unwrap(); + let chunks = match patch.as_slice() { + [Hunk::UpdateFile { chunks, .. }] => chunks, + _ => panic!("Expected a single UpdateFile hunk"), + }; + + let diff = unified_diff_from_chunks(&path, chunks).unwrap(); + let expected_diff = r#"@@ -2,2 +2,2 @@ + bar +-baz ++BAZ +"#; + assert_eq!(expected_diff, diff); + } + + #[test] + fn test_unified_diff_insert_at_eof() { + // Insert a new line at end‑of‑file. + let dir = tempdir().unwrap(); + let path = dir.path().join("insert.txt"); + fs::write(&path, "foo\nbar\nbaz\n").unwrap(); + + let patch = wrap_patch(&format!( + r#"*** Update File: {} +@@ ++quux +*** End of File +"#, + path.display() + )); + + let patch = parse_patch(&patch).unwrap(); + let chunks = match patch.as_slice() { + [Hunk::UpdateFile { chunks, .. }] => chunks, + _ => panic!("Expected a single UpdateFile hunk"), + }; + + let diff = unified_diff_from_chunks(&path, chunks).unwrap(); + let expected_diff = r#"@@ -3 +3,2 @@ + baz ++quux +"#; + assert_eq!(expected_diff, diff); + } + + #[test] + fn test_unified_diff_interleaved_changes() { + // Original file with six lines. + let dir = tempdir().unwrap(); + let path = dir.path().join("interleaved.txt"); + fs::write(&path, "a\nb\nc\nd\ne\nf\n").unwrap(); + + // Patch replaces two separate lines and appends a new one at EOF using + // three distinct chunks. 
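+ // i.e. `b` -> `B`, `e` -> `E`, plus an appended `g`: the same edits as
+ // `test_update_file_hunk_interleaved_changes` above.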
+ let patch_body = format!( + r#"*** Update File: {} +@@ + a +-b ++B +@@ + d +-e ++E +@@ + f ++g +*** End of File"#, + path.display() + ); + let patch = wrap_patch(&patch_body); + + // Extract chunks then build the unified diff. + let parsed = parse_patch(&patch).unwrap(); + let chunks = match parsed.as_slice() { + [Hunk::UpdateFile { chunks, .. }] => chunks, + _ => panic!("Expected a single UpdateFile hunk"), + }; + + let diff = unified_diff_from_chunks(&path, chunks).unwrap(); + + let expected = r#"@@ -1,6 +1,7 @@ + a +-b ++B + c + d +-e ++E + f ++g +"#; + + assert_eq!(expected, diff); + + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + apply_patch(&patch, &mut stdout, &mut stderr).unwrap(); + let contents = fs::read_to_string(path).unwrap(); + assert_eq!( + contents, + r#"a +B +c +d +E +f +g +"# + ); + } +} diff --git a/codex-rs/apply-patch/src/parser.rs b/codex-rs/apply-patch/src/parser.rs new file mode 100644 index 00000000..4fa2ff71 --- /dev/null +++ b/codex-rs/apply-patch/src/parser.rs @@ -0,0 +1,499 @@ +//! This module is responsible for parsing & validating a patch into a list of "hunks". +//! (It does not attempt to actually check that the patch can be applied to the filesystem.) +//! +//! The official Lark grammar for the apply-patch format is: +//! +//! start: begin_patch hunk+ end_patch +//! begin_patch: "*** Begin Patch" LF +//! end_patch: "*** End Patch" LF? +//! +//! hunk: add_hunk | delete_hunk | update_hunk +//! add_hunk: "*** Add File: " filename LF add_line+ +//! delete_hunk: "*** Delete File: " filename LF +//! update_hunk: "*** Update File: " filename LF change_move? change? +//! filename: /(.+)/ +//! add_line: "+" /(.+)/ LF -> line +//! +//! change_move: "*** Move to: " filename LF +//! change: (change_context | change_line)+ eof_line? +//! change_context: ("@@" | "@@ " /(.+)/) LF +//! change_line: ("+" | "-" | " ") /(.+)/ LF +//! eof_line: "*** End of File" LF +//! +//! The parser below is a little more lenient than the explicit spec and allows for +//! leading/trailing whitespace around patch markers. +use std::path::PathBuf; + +use thiserror::Error; + +const BEGIN_PATCH_MARKER: &str = "*** Begin Patch"; +const END_PATCH_MARKER: &str = "*** End Patch"; +const ADD_FILE_MARKER: &str = "*** Add File: "; +const DELETE_FILE_MARKER: &str = "*** Delete File: "; +const UPDATE_FILE_MARKER: &str = "*** Update File: "; +const MOVE_TO_MARKER: &str = "*** Move to: "; +const EOF_MARKER: &str = "*** End of File"; +const CHANGE_CONTEXT_MARKER: &str = "@@ "; +const EMPTY_CHANGE_CONTEXT_MARKER: &str = "@@"; + +#[derive(Debug, PartialEq, Error)] +pub enum ParseError { + #[error("invalid patch: {0}")] + InvalidPatchError(String), + #[error("invalid hunk at line {line_number}, {message}")] + InvalidHunkError { message: String, line_number: usize }, +} +use ParseError::*; + +#[derive(Debug, PartialEq)] +#[allow(clippy::enum_variant_names)] +pub enum Hunk { + AddFile { + path: PathBuf, + contents: String, + }, + DeleteFile { + path: PathBuf, + }, + UpdateFile { + path: PathBuf, + move_path: Option, + + /// Chunks should be in order, i.e. the `change_context` of one chunk + /// should occur later in the file than the previous chunk. + chunks: Vec, + }, +} +use Hunk::*; + +#[derive(Debug, PartialEq)] +pub struct UpdateFileChunk { + /// A single line of context used to narrow down the position of the chunk + /// (this is usually a class, method, or function definition.) + pub change_context: Option, + + /// A contiguous block of lines that should be replaced with `new_lines`. 
+ /// `old_lines` must occur strictly after `change_context`. + pub old_lines: Vec, + pub new_lines: Vec, + + /// If set to true, `old_lines` must occur at the end of the source file. + /// (Tolerance around trailing newlines should be encouraged.) + pub is_end_of_file: bool, +} + +pub fn parse_patch(patch: &str) -> Result, ParseError> { + let lines: Vec<&str> = patch.trim().lines().collect(); + if lines.is_empty() || lines[0] != BEGIN_PATCH_MARKER { + return Err(InvalidPatchError(String::from( + "The first line of the patch must be '*** Begin Patch'", + ))); + } + let last_line_index = lines.len() - 1; + if lines[last_line_index] != END_PATCH_MARKER { + return Err(InvalidPatchError(String::from( + "The last line of the patch must be '*** End Patch'", + ))); + } + let mut hunks: Vec = Vec::new(); + let mut remaining_lines = &lines[1..last_line_index]; + let mut line_number = 2; + while !remaining_lines.is_empty() { + let (hunk, hunk_lines) = parse_one_hunk(remaining_lines, line_number)?; + hunks.push(hunk); + line_number += hunk_lines; + remaining_lines = &remaining_lines[hunk_lines..] + } + Ok(hunks) +} + +/// Attempts to parse a single hunk from the start of lines. +/// Returns the parsed hunk and the number of lines parsed (or a ParseError). +fn parse_one_hunk(lines: &[&str], line_number: usize) -> Result<(Hunk, usize), ParseError> { + // Be tolerant of case mismatches and extra padding around marker strings. + let first_line = lines[0].trim(); + if let Some(path) = first_line.strip_prefix(ADD_FILE_MARKER) { + // Add File + let mut contents = String::new(); + let mut parsed_lines = 1; + for add_line in &lines[1..] { + if let Some(line_to_add) = add_line.strip_prefix('+') { + contents.push_str(line_to_add); + contents.push('\n'); + parsed_lines += 1; + } else { + break; + } + } + return Ok(( + AddFile { + path: PathBuf::from(path), + contents, + }, + parsed_lines, + )); + } else if let Some(path) = first_line.strip_prefix(DELETE_FILE_MARKER) { + // Delete File + return Ok(( + DeleteFile { + path: PathBuf::from(path), + }, + 1, + )); + } else if let Some(path) = first_line.strip_prefix(UPDATE_FILE_MARKER) { + // Update File + let mut remaining_lines = &lines[1..]; + let mut parsed_lines = 1; + + // Optional: move file line + let move_path = remaining_lines + .first() + .and_then(|x| x.strip_prefix(MOVE_TO_MARKER)); + + if move_path.is_some() { + remaining_lines = &remaining_lines[1..]; + parsed_lines += 1; + } + + let mut chunks = Vec::new(); + // NOTE: we need to know to stop once we reach the next special marker header. + while !remaining_lines.is_empty() { + // Skip over any completely blank lines that may separate chunks. + if remaining_lines[0].trim().is_empty() { + parsed_lines += 1; + remaining_lines = &remaining_lines[1..]; + continue; + } + + if remaining_lines[0].starts_with("***") { + break; + } + + let (chunk, chunk_lines) = parse_update_file_chunk( + remaining_lines, + line_number + parsed_lines, + chunks.is_empty(), + )?; + chunks.push(chunk); + parsed_lines += chunk_lines; + remaining_lines = &remaining_lines[chunk_lines..] + } + + if chunks.is_empty() { + return Err(InvalidHunkError { + message: format!("Update file hunk for path '{path}' is empty"), + line_number, + }); + } + + return Ok(( + UpdateFile { + path: PathBuf::from(path), + move_path: move_path.map(PathBuf::from), + chunks, + }, + parsed_lines, + )); + } + + Err(InvalidHunkError { message: format!("'{first_line}' is not a valid hunk header. 
Valid hunk headers: '*** Add File: {{path}}', '*** Delete File: {{path}}', '*** Update File: {{path}}'"), line_number }) +} + +fn parse_update_file_chunk( + lines: &[&str], + line_number: usize, + allow_missing_context: bool, +) -> Result<(UpdateFileChunk, usize), ParseError> { + if lines.is_empty() { + return Err(InvalidHunkError { + message: "Update hunk does not contain any lines".to_string(), + line_number, + }); + } + // If we see an explicit context marker @@ or @@ , consume it; otherwise, optionally + // allow treating the chunk as starting directly with diff lines. + let (change_context, start_index) = if lines[0] == EMPTY_CHANGE_CONTEXT_MARKER { + (None, 1) + } else if let Some(context) = lines[0].strip_prefix(CHANGE_CONTEXT_MARKER) { + (Some(context.to_string()), 1) + } else { + if !allow_missing_context { + return Err(InvalidHunkError { + message: format!( + "Expected update hunk to start with a @@ context marker, got: '{}'", + lines[0] + ), + line_number, + }); + } + (None, 0) + }; + if start_index >= lines.len() { + return Err(InvalidHunkError { + message: "Update hunk does not contain any lines".to_string(), + line_number: line_number + 1, + }); + } + let mut chunk = UpdateFileChunk { + change_context, + old_lines: Vec::new(), + new_lines: Vec::new(), + is_end_of_file: false, + }; + let mut parsed_lines = 0; + for line in &lines[start_index..] { + match *line { + EOF_MARKER => { + if parsed_lines == 0 { + return Err(InvalidHunkError { + message: "Update hunk does not contain any lines".to_string(), + line_number: line_number + 1, + }); + } + chunk.is_end_of_file = true; + parsed_lines += 1; + break; + } + line_contents => { + match line_contents.chars().next() { + None => { + // Interpret this as an empty line. + chunk.old_lines.push(String::new()); + chunk.new_lines.push(String::new()); + } + Some(' ') => { + chunk.old_lines.push(line_contents[1..].to_string()); + chunk.new_lines.push(line_contents[1..].to_string()); + } + Some('+') => { + chunk.new_lines.push(line_contents[1..].to_string()); + } + Some('-') => { + chunk.old_lines.push(line_contents[1..].to_string()); + } + _ => { + if parsed_lines == 0 { + return Err(InvalidHunkError { message: format!("Unexpected line found in update hunk: '{line_contents}'. Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)"), line_number: line_number + 1 }); + } + // Assume this is the start of the next hunk. 
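+ // (typically the next `@@` context marker or a `*** ` file header).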
+ break; + } + } + parsed_lines += 1; + } + } + } + + Ok((chunk, parsed_lines + start_index)) +} + +#[test] +fn test_parse_patch() { + assert_eq!( + parse_patch("bad"), + Err(InvalidPatchError( + "The first line of the patch must be '*** Begin Patch'".to_string() + )) + ); + assert_eq!( + parse_patch("*** Begin Patch\nbad"), + Err(InvalidPatchError( + "The last line of the patch must be '*** End Patch'".to_string() + )) + ); + assert_eq!( + parse_patch( + "*** Begin Patch\n\ + *** Update File: test.py\n\ + *** End Patch" + ), + Err(InvalidHunkError { + message: "Update file hunk for path 'test.py' is empty".to_string(), + line_number: 2, + }) + ); + assert_eq!( + parse_patch( + "*** Begin Patch\n\ + *** End Patch" + ), + Ok(Vec::new()) + ); + assert_eq!( + parse_patch( + "*** Begin Patch\n\ + *** Add File: path/add.py\n\ + +abc\n\ + +def\n\ + *** Delete File: path/delete.py\n\ + *** Update File: path/update.py\n\ + *** Move to: path/update2.py\n\ + @@ def f():\n\ + - pass\n\ + + return 123\n\ + *** End Patch" + ), + Ok(vec![ + AddFile { + path: PathBuf::from("path/add.py"), + contents: "abc\ndef\n".to_string() + }, + DeleteFile { + path: PathBuf::from("path/delete.py") + }, + UpdateFile { + path: PathBuf::from("path/update.py"), + move_path: Some(PathBuf::from("path/update2.py")), + chunks: vec![UpdateFileChunk { + change_context: Some("def f():".to_string()), + old_lines: vec![" pass".to_string()], + new_lines: vec![" return 123".to_string()], + is_end_of_file: false + }] + } + ]) + ); + // Update hunk followed by another hunk (Add File). + assert_eq!( + parse_patch( + "*** Begin Patch\n\ + *** Update File: file.py\n\ + @@\n\ + +line\n\ + *** Add File: other.py\n\ + +content\n\ + *** End Patch" + ), + Ok(vec![ + UpdateFile { + path: PathBuf::from("file.py"), + move_path: None, + chunks: vec![UpdateFileChunk { + change_context: None, + old_lines: vec![], + new_lines: vec!["line".to_string()], + is_end_of_file: false + }], + }, + AddFile { + path: PathBuf::from("other.py"), + contents: "content\n".to_string() + } + ]) + ); + + // Update hunk without an explicit @@ header for the first chunk should parse. + // Use a raw string to preserve the leading space diff marker on the context line. + assert_eq!( + parse_patch( + r#"*** Begin Patch +*** Update File: file2.py + import foo ++bar +*** End Patch"#, + ), + Ok(vec![UpdateFile { + path: PathBuf::from("file2.py"), + move_path: None, + chunks: vec![UpdateFileChunk { + change_context: None, + old_lines: vec!["import foo".to_string()], + new_lines: vec!["import foo".to_string(), "bar".to_string()], + is_end_of_file: false, + }], + }]) + ); +} + +#[test] +fn test_parse_one_hunk() { + assert_eq!( + parse_one_hunk(&["bad"], 234), + Err(InvalidHunkError { + message: "'bad' is not a valid hunk header. \ + Valid hunk headers: '*** Add File: {path}', '*** Delete File: {path}', '*** Update File: {path}'".to_string(), + line_number: 234 + }) + ); + // Other edge cases are already covered by tests above/below. 
+} + +#[test] +fn test_update_file_chunk() { + assert_eq!( + parse_update_file_chunk(&["bad"], 123, false), + Err(InvalidHunkError { + message: "Expected update hunk to start with a @@ context marker, got: 'bad'" + .to_string(), + line_number: 123 + }) + ); + assert_eq!( + parse_update_file_chunk(&["@@"], 123, false), + Err(InvalidHunkError { + message: "Update hunk does not contain any lines".to_string(), + line_number: 124 + }) + ); + assert_eq!( + parse_update_file_chunk(&["@@", "bad"], 123, false), + Err(InvalidHunkError { + message: "Unexpected line found in update hunk: 'bad'. \ + Every line should start with ' ' (context line), '+' (added line), or '-' (removed line)".to_string(), + line_number: 124 + }) + ); + assert_eq!( + parse_update_file_chunk(&["@@", "*** End of File"], 123, false), + Err(InvalidHunkError { + message: "Update hunk does not contain any lines".to_string(), + line_number: 124 + }) + ); + assert_eq!( + parse_update_file_chunk( + &[ + "@@ change_context", + "", + " context", + "-remove", + "+add", + " context2", + "*** End Patch", + ], + 123, + false + ), + Ok(( + (UpdateFileChunk { + change_context: Some("change_context".to_string()), + old_lines: vec![ + "".to_string(), + "context".to_string(), + "remove".to_string(), + "context2".to_string() + ], + new_lines: vec![ + "".to_string(), + "context".to_string(), + "add".to_string(), + "context2".to_string() + ], + is_end_of_file: false + }), + 6 + )) + ); + assert_eq!( + parse_update_file_chunk(&["@@", "+line", "*** End of File"], 123, false), + Ok(( + (UpdateFileChunk { + change_context: None, + old_lines: vec![], + new_lines: vec!["line".to_string()], + is_end_of_file: true + }), + 3 + )) + ); +} diff --git a/codex-rs/apply-patch/src/seek_sequence.rs b/codex-rs/apply-patch/src/seek_sequence.rs new file mode 100644 index 00000000..c379767d --- /dev/null +++ b/codex-rs/apply-patch/src/seek_sequence.rs @@ -0,0 +1,107 @@ +/// Attempt to find the sequence of `pattern` lines within `lines` beginning at or after `start`. +/// Returns the starting index of the match or `None` if not found. Matches are attempted with +/// decreasing strictness: exact match, then ignoring trailing whitespace, then ignoring leading +/// and trailing whitespace. When `eof` is true, we first try starting at the end-of-file (so that +/// patterns intended to match file endings are applied at the end), and fall back to searching +/// from `start` if needed. +/// +/// Special cases handled defensively: +/// • Empty `pattern` → returns `Some(start)` (no-op match) +/// • `pattern.len() > lines.len()` → returns `None` (cannot match, avoids +/// out‑of‑bounds panic that occurred pre‑2025‑04‑12) +pub(crate) fn seek_sequence( + lines: &[String], + pattern: &[String], + start: usize, + eof: bool, +) -> Option { + if pattern.is_empty() { + return Some(start); + } + + // When the pattern is longer than the available input there is no possible + // match. Early‑return to avoid the out‑of‑bounds slice that would occur in + // the search loops below (previously caused a panic when + // `pattern.len() > lines.len()`). + if pattern.len() > lines.len() { + return None; + } + let search_start = if eof && lines.len() >= pattern.len() { + lines.len() - pattern.len() + } else { + start + }; + // Exact match first. + for i in search_start..=lines.len().saturating_sub(pattern.len()) { + if lines[i..i + pattern.len()] == *pattern { + return Some(i); + } + } + // Then rstrip match. 
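+ // e.g. a pattern line "foo" may match a file line "foo   " once trailing
+ // whitespace is ignored on both sides.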
+ for i in search_start..=lines.len().saturating_sub(pattern.len()) { + let mut ok = true; + for (p_idx, pat) in pattern.iter().enumerate() { + if lines[i + p_idx].trim_end() != pat.trim_end() { + ok = false; + break; + } + } + if ok { + return Some(i); + } + } + // Finally, trim both sides to allow more lenience. + for i in search_start..=lines.len().saturating_sub(pattern.len()) { + let mut ok = true; + for (p_idx, pat) in pattern.iter().enumerate() { + if lines[i + p_idx].trim() != pat.trim() { + ok = false; + break; + } + } + if ok { + return Some(i); + } + } + None +} + +#[cfg(test)] +mod tests { + use super::seek_sequence; + + fn to_vec(strings: &[&str]) -> Vec { + strings.iter().map(|s| s.to_string()).collect() + } + + #[test] + fn test_exact_match_finds_sequence() { + let lines = to_vec(&["foo", "bar", "baz"]); + let pattern = to_vec(&["bar", "baz"]); + assert_eq!(seek_sequence(&lines, &pattern, 0, false), Some(1)); + } + + #[test] + fn test_rstrip_match_ignores_trailing_whitespace() { + let lines = to_vec(&["foo ", "bar\t\t"]); + // Pattern omits trailing whitespace. + let pattern = to_vec(&["foo", "bar"]); + assert_eq!(seek_sequence(&lines, &pattern, 0, false), Some(0)); + } + + #[test] + fn test_trim_match_ignores_leading_and_trailing_whitespace() { + let lines = to_vec(&[" foo ", " bar\t"]); + // Pattern omits any additional whitespace. + let pattern = to_vec(&["foo", "bar"]); + assert_eq!(seek_sequence(&lines, &pattern, 0, false), Some(0)); + } + + #[test] + fn test_pattern_longer_than_input_returns_none() { + let lines = to_vec(&["just one line"]); + let pattern = to_vec(&["too", "many", "lines"]); + // Should not panic – must return None when pattern cannot possibly fit. + assert_eq!(seek_sequence(&lines, &pattern, 0, false), None); + } +} diff --git a/codex-rs/cli/Cargo.toml b/codex-rs/cli/Cargo.toml new file mode 100644 index 00000000..12dab8c0 --- /dev/null +++ b/codex-rs/cli/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "codex-cli" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "codex" +path = "src/main.rs" + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-core = { path = "../core" } +codex-exec = { path = "../exec" } +codex-interactive = { path = "../interactive" } +codex-repl = { path = "../repl" } +codex-tui = { path = "../tui" } +serde_json = "1" +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tracing = "0.1.41" +tracing-subscriber = "0.3.19" diff --git a/codex-rs/cli/src/main.rs b/codex-rs/cli/src/main.rs new file mode 100644 index 00000000..2eaaa1c8 --- /dev/null +++ b/codex-rs/cli/src/main.rs @@ -0,0 +1,112 @@ +mod proto; +mod seatbelt; + +use std::path::PathBuf; + +use clap::ArgAction; +use clap::Parser; +use codex_exec::Cli as ExecCli; +use codex_interactive::Cli as InteractiveCli; +use codex_repl::Cli as ReplCli; +use codex_tui::Cli as TuiCli; + +use crate::proto::ProtoCli; + +/// Codex CLI +/// +/// If no subcommand is specified, options will be forwarded to the interactive CLI. +#[derive(Debug, Parser)] +#[clap( + author, + version, + // If a sub‑command is given, ignore requirements of the default args. + subcommand_negates_reqs = true +)] +struct MultitoolCli { + #[clap(flatten)] + interactive: InteractiveCli, + + #[clap(subcommand)] + subcommand: Option, +} + +#[derive(Debug, clap::Subcommand)] +enum Subcommand { + /// Run Codex non-interactively. + #[clap(visible_alias = "e")] + Exec(ExecCli), + + /// Run the TUI. 
+ #[clap(visible_alias = "t")] + Tui(TuiCli), + + /// Run the REPL. + #[clap(visible_alias = "r")] + Repl(ReplCli), + + /// Run the Protocol stream via stdin/stdout + #[clap(visible_alias = "p")] + Proto(ProtoCli), + + /// Internal debugging commands. + Debug(DebugArgs), +} + +#[derive(Debug, Parser)] +struct DebugArgs { + #[command(subcommand)] + cmd: DebugCommand, +} + +#[derive(Debug, clap::Subcommand)] +enum DebugCommand { + /// Run a command under Seatbelt (macOS only). + Seatbelt(SeatbeltCommand), +} + +#[derive(Debug, Parser)] +struct SeatbeltCommand { + /// Writable folder for sandbox in full-auto mode (can be specified multiple times). + #[arg(long = "writable-root", short = 'w', value_name = "DIR", action = ArgAction::Append, use_value_delimiter = false)] + writable_roots: Vec, + + /// Full command args to run under seatbelt. + #[arg(trailing_var_arg = true)] + command: Vec, +} + +#[derive(Debug, Parser)] +struct ReplProto {} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = MultitoolCli::parse(); + + match cli.subcommand { + None => { + codex_interactive::run_main(cli.interactive).await?; + } + Some(Subcommand::Exec(exec_cli)) => { + codex_exec::run_main(exec_cli).await?; + } + Some(Subcommand::Tui(tui_cli)) => { + codex_tui::run_main(tui_cli)?; + } + Some(Subcommand::Repl(repl_cli)) => { + codex_repl::run_main(repl_cli).await?; + } + Some(Subcommand::Proto(proto_cli)) => { + proto::run_main(proto_cli).await?; + } + Some(Subcommand::Debug(debug_args)) => match debug_args.cmd { + DebugCommand::Seatbelt(SeatbeltCommand { + command, + writable_roots, + }) => { + seatbelt::run_seatbelt(command, writable_roots).await?; + } + }, + } + + Ok(()) +} diff --git a/codex-rs/cli/src/proto.rs b/codex-rs/cli/src/proto.rs new file mode 100644 index 00000000..5f4f466e --- /dev/null +++ b/codex-rs/cli/src/proto.rs @@ -0,0 +1,94 @@ +use std::io::IsTerminal; + +use clap::Parser; +use codex_core::protocol::Submission; +use codex_core::util::notify_on_sigint; +use codex_core::Codex; +use tokio::io::AsyncBufReadExt; +use tokio::io::BufReader; +use tracing::error; +use tracing::info; + +#[derive(Debug, Parser)] +pub struct ProtoCli {} + +pub async fn run_main(_opts: ProtoCli) -> anyhow::Result<()> { + if std::io::stdin().is_terminal() { + anyhow::bail!("Protocol mode expects stdin to be a pipe, not a terminal"); + } + + tracing_subscriber::fmt() + .with_writer(std::io::stderr) + .init(); + + let ctrl_c = notify_on_sigint(); + let codex = Codex::spawn(ctrl_c.clone())?; + + // Task that reads JSON lines from stdin and forwards to Submission Queue + let sq_fut = { + let codex = codex.clone(); + let ctrl_c = ctrl_c.clone(); + async move { + let stdin = BufReader::new(tokio::io::stdin()); + let mut lines = stdin.lines(); + loop { + let result = tokio::select! { + _ = ctrl_c.notified() => { + info!("Interrupted, exiting"); + break + }, + res = lines.next_line() => res, + }; + + match result { + Ok(Some(line)) => { + let line = line.trim(); + if line.is_empty() { + continue; + } + match serde_json::from_str::(line) { + Ok(sub) => { + if let Err(e) = codex.submit(sub).await { + error!("{e:#}"); + break; + } + } + Err(e) => { + error!("invalid submission: {e}"); + } + } + } + _ => { + info!("Submission queue closed"); + break; + } + } + } + } + }; + + // Task that reads events from the agent and prints them as JSON lines to stdout + let eq_fut = async move { + loop { + let event = tokio::select! 
{ + _ = ctrl_c.notified() => break, + event = codex.next_event() => event, + }; + match event { + Ok(event) => { + let event_str = + serde_json::to_string(&event).expect("JSON serialization failed"); + println!("{event_str}"); + } + Err(e) => { + error!("{e:#}"); + break; + } + } + } + info!("Event queue closed"); + }; + + tokio::join!(sq_fut, eq_fut); + Ok(()) +} diff --git a/codex-rs/cli/src/seatbelt.rs b/codex-rs/cli/src/seatbelt.rs new file mode 100644 index 00000000..c395d96c --- /dev/null +++ b/codex-rs/cli/src/seatbelt.rs @@ -0,0 +1,17 @@ +use codex_core::exec::create_seatbelt_command; +use std::path::PathBuf; + +pub(crate) async fn run_seatbelt( + command: Vec, + writable_roots: Vec, +) -> anyhow::Result<()> { + let seatbelt_command = create_seatbelt_command(command, &writable_roots); + let status = tokio::process::Command::new(seatbelt_command[0].clone()) + .args(&seatbelt_command[1..]) + .spawn() + .map_err(|e| anyhow::anyhow!("Failed to spawn command: {}", e))? + .wait() + .await + .map_err(|e| anyhow::anyhow!("Failed to wait for command: {}", e))?; + std::process::exit(status.code().unwrap_or(1)); +} diff --git a/codex-rs/core/Cargo.toml b/codex-rs/core/Cargo.toml new file mode 100644 index 00000000..778362d2 --- /dev/null +++ b/codex-rs/core/Cargo.toml @@ -0,0 +1,62 @@ +[package] +name = "codex-core" +version = "0.1.0" +edition = "2021" + +[lib] +name = "codex_core" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +async-channel = "2.3.1" +base64 = "0.21" +bytes = "1.10.1" +clap = { version = "4", features = ["derive", "wrap_help"], optional = true } +codex-apply-patch = { path = "../apply-patch" } +dirs = "6" +env-flags = "0.1.1" +eventsource-stream = "0.2.3" +expanduser = "1.2.2" +fs-err = "3.1.0" +futures = "0.3" +mime_guess = "2.0" +patch = "0.7" +rand = "0.9" +reqwest = { version = "0.12", features = ["json", "stream"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +thiserror = "2.0.12" +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tokio-util = "0.7.14" +toml = "0.8.20" +tracing = { version = "0.1.41", features = ["log"] } +tree-sitter = "0.25.3" +tree-sitter-bash = "0.23.3" + +[target.'cfg(target_os = "linux")'.dependencies] +libc = "0.2.172" +landlock = "0.4.1" +seccompiler = "0.5.0" + +# Build OpenSSL from source for musl builds. +[target.x86_64-unknown-linux-musl.dependencies] +openssl-sys = { version = "*", features = ["vendored"] } + +[dev-dependencies] +assert_cmd = "2" +predicates = "3" +tempfile = "3" +wiremock = "0.6" + +[features] +default = [] + +# Separate feature so that `clap` is not a mandatory dependency. +cli = ["clap"] diff --git a/codex-rs/core/README.md b/codex-rs/core/README.md new file mode 100644 index 00000000..9b3e59c8 --- /dev/null +++ b/codex-rs/core/README.md @@ -0,0 +1,10 @@ +# codex-core + +This crate implements the business logic for Codex. It is designed to be used by the various Codex UIs written in Rust. + +Though for non-Rust UIs, we are also working to define a _protocol_ for talking to Codex. See: + +- [Specification](../docs/protocol_v1.md) +- [Rust types](./src/protocol.rs) + +You can use the `proto` subcommand using the executable in the [`cli` crate](../cli) to speak the protocol using newline-delimited-JSON over stdin/stdout. 
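As a rough sketch of what "speaking the protocol" looks like from Rust rather than over stdin/stdout, the snippet below drives the same queue-pair API that the `proto` subcommand wraps. It only relies on calls that appear elsewhere in this patch (`notify_on_sigint`, `Codex::spawn`, `submit`, `next_event`) and assumes the caller has already built a `Submission` from the types in `core/src/protocol.rs`, with `anyhow` and `serde_json` available; it is an illustration, not part of the patch itself.

```rust
use codex_core::protocol::Submission;
use codex_core::util::notify_on_sigint;
use codex_core::Codex;

/// Minimal sketch: feed one submission to the agent and print every event it
/// emits as a JSON line, mirroring what the `proto` subcommand does.
async fn run_one(sub: Submission) -> anyhow::Result<()> {
    let ctrl_c = notify_on_sigint();
    let codex = Codex::spawn(ctrl_c)?;
    codex.submit(sub).await?;
    loop {
        let event = codex.next_event().await?;
        println!("{}", serde_json::to_string(&event)?);
    }
}
```

A UI would typically run this loop on its own task and translate each `Event` into widget updates instead of printing it.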
diff --git a/codex-rs/core/prompt.md b/codex-rs/core/prompt.md
new file mode 100644
index 00000000..66cd55b6
--- /dev/null
+++ b/codex-rs/core/prompt.md
@@ -0,0 +1,98 @@
+Please resolve the user's task by editing and testing the code files in your current code execution session.
+You are a deployed coding agent.
+Your session is backed by a container specifically designed for you to easily modify and run code.
+The repo(s) are already cloned in your working directory, and you must fully solve the problem for your answer to be considered correct.
+
+You MUST adhere to the following criteria when executing the task:
+
+- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
+- Analyzing code for vulnerabilities is allowed.
+- Showing user code and tool call details is allowed.
+- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
+- Do not use \`ls -R\`, \`find\`, or \`grep\` - these are slow in large repos. Use \`rg\` and \`rg --files\`.
+- Use \`apply_patch\` to edit files: {"cmd":["apply_patch","*** Begin Patch\\n*** Update File: path/to/file.py\\n@@ def example():\\n- pass\\n+ return 123\\n*** End Patch"]}
+- If completing the user's task requires writing or modifying files:
+  - Your code and final answer should follow these _CODING GUIDELINES_:
+    - Fix the problem at the root cause rather than applying surface-level patches, when possible.
+    - Avoid unneeded complexity in your solution.
+    - Ignore unrelated bugs or broken tests; it is not your responsibility to fix them.
+    - Update documentation as necessary.
+    - Keep changes consistent with the style of the existing codebase. Changes should be minimal and focused on the task.
+      - Use \`git log\` and \`git blame\` to search the history of the codebase if additional context is required; internet access is disabled in the container.
+    - NEVER add copyright or license headers unless specifically requested.
+    - You do not need to \`git commit\` your changes; this will be done automatically for you.
+    - If there is a .pre-commit-config.yaml, use \`pre-commit run --files ...\` to check that your changes pass the pre-commit checks. However, do not fix pre-existing errors on lines you didn't touch.
+      - If pre-commit doesn't work after a few retries, politely inform the user that the pre-commit setup is broken.
+    - Once you finish coding, you must:
+      - Check \`git status\` to sanity check your changes; revert any scratch files or changes.
+      - Remove all inline comments you added as much as possible, even if they look normal. Check using \`git diff\`. Inline comments must be generally avoided, unless active maintainers of the repo, after long careful study of the code and the issue, will still misinterpret the code without the comments.
+      - Check if you accidentally added copyright or license headers. If so, remove them.
+      - Try to run pre-commit if it is available.
+      - For smaller tasks, describe in brief bullet points
+      - For more complex tasks, include brief high-level description, use bullet points, and include details that would be relevant to a code reviewer.
+- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
+  - Respond in a friendly tone as a remote teammate, who is knowledgeable, capable and eager to help with coding.
+- When your task involves writing or modifying files: + - Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using \`apply_patch\`. Instead, reference the file as already saved. + - Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them. + +§ `apply-patch` Specification + +Your patch language is a stripped‑down, file‑oriented diff format designed to be easy to parse and safe to apply. You can think of it as a high‑level envelope: + +**_ Begin Patch +[ one or more file sections ] +_** End Patch + +Within that envelope, you get a sequence of file operations. +You MUST include a header to specify the action you are taking. +Each operation starts with one of three headers: + +**_ Add File: - create a new file. Every following line is a + line (the initial contents). +_** Delete File: - remove an existing file. Nothing follows. +\*\*\* Update File: - patch an existing file in place (optionally with a rename). + +May be immediately followed by \*\*\* Move to: if you want to rename the file. +Then one or more “hunks”, each introduced by @@ (optionally followed by a hunk header). +Within a hunk each line starts with: + +- for inserted text, + +* for removed text, or + space ( ) for context. + At the end of a truncated hunk you can emit \*\*\* End of File. + +Patch := Begin { FileOp } End +Begin := "**_ Begin Patch" NEWLINE +End := "_** End Patch" NEWLINE +FileOp := AddFile | DeleteFile | UpdateFile +AddFile := "**_ Add File: " path NEWLINE { "+" line NEWLINE } +DeleteFile := "_** Delete File: " path NEWLINE +UpdateFile := "**_ Update File: " path NEWLINE [ MoveTo ] { Hunk } +MoveTo := "_** Move to: " newPath NEWLINE +Hunk := "@@" [ header ] NEWLINE { HunkLine } [ "*** End of File" NEWLINE ] +HunkLine := (" " | "-" | "+") text NEWLINE + +A full patch can combine several operations: + +**_ Begin Patch +_** Add File: hello.txt ++Hello world +**_ Update File: src/app.py +_** Move to: src/main.py +@@ def greet(): +-print("Hi") ++print("Hello, world!") +**_ Delete File: obsolete.txt +_** End Patch + +It is important to remember: + +- You must include a header with your intended action (Add/Delete/Update) +- You must prefix new lines with `+` even when creating a new file + +You can invoke apply_patch like: + +``` +shell {"command":["apply_patch","*** Begin Patch\n*** Add File: hello.txt\n+Hello, world!\n*** End Patch\n"]} +``` diff --git a/codex-rs/core/src/approval_mode_cli_arg.rs b/codex-rs/core/src/approval_mode_cli_arg.rs new file mode 100644 index 00000000..eb90b24d --- /dev/null +++ b/codex-rs/core/src/approval_mode_cli_arg.rs @@ -0,0 +1,61 @@ +//! Standard type to use with the `--approval-mode` CLI option. +//! Available when the `cli` feature is enabled for the crate. + +use clap::ValueEnum; + +use crate::protocol::AskForApproval; +use crate::protocol::SandboxPolicy; + +#[derive(Clone, Debug, ValueEnum)] +#[value(rename_all = "kebab-case")] +pub enum ApprovalModeCliArg { + /// Run all commands without asking for user approval. + /// Only asks for approval if a command fails to execute, in which case it + /// will escalate to the user to ask for un-sandboxed execution. + OnFailure, + + /// Only run "known safe" commands (e.g. ls, cat, sed) without + /// asking for user approval. Will escalate to the user if the model + /// proposes a command that is not allow-listed. 
+ UnlessAllowListed, + + /// Never ask for user approval + /// Execution failures are immediately returned to the model. + Never, +} + +#[derive(Clone, Debug, ValueEnum)] +#[value(rename_all = "kebab-case")] +pub enum SandboxModeCliArg { + /// Network syscalls will be blocked + NetworkRestricted, + /// Filesystem writes will be restricted + FileWriteRestricted, + /// Network and filesystem writes will be restricted + NetworkAndFileWriteRestricted, + /// No restrictions; full "unsandboxed" mode + DangerousNoRestrictions, +} + +impl From for AskForApproval { + fn from(value: ApprovalModeCliArg) -> Self { + match value { + ApprovalModeCliArg::OnFailure => AskForApproval::OnFailure, + ApprovalModeCliArg::UnlessAllowListed => AskForApproval::UnlessAllowListed, + ApprovalModeCliArg::Never => AskForApproval::Never, + } + } +} + +impl From for SandboxPolicy { + fn from(value: SandboxModeCliArg) -> Self { + match value { + SandboxModeCliArg::NetworkRestricted => SandboxPolicy::NetworkRestricted, + SandboxModeCliArg::FileWriteRestricted => SandboxPolicy::FileWriteRestricted, + SandboxModeCliArg::NetworkAndFileWriteRestricted => { + SandboxPolicy::NetworkAndFileWriteRestricted + } + SandboxModeCliArg::DangerousNoRestrictions => SandboxPolicy::DangerousNoRestrictions, + } + } +} diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs new file mode 100644 index 00000000..57f593a8 --- /dev/null +++ b/codex-rs/core/src/client.rs @@ -0,0 +1,374 @@ +use std::collections::BTreeMap; +use std::io::BufRead; +use std::path::Path; +use std::pin::Pin; +use std::sync::LazyLock; +use std::task::Context; +use std::task::Poll; +use std::time::Duration; + +use bytes::Bytes; +use eventsource_stream::Eventsource; +use futures::prelude::*; +use reqwest::StatusCode; +use serde::Deserialize; +use serde::Serialize; +use serde_json::Value; +use tokio::sync::mpsc; +use tokio::time::timeout; +use tokio_util::io::ReaderStream; +use tracing::debug; +use tracing::trace; +use tracing::warn; + +use crate::error::CodexErr; +use crate::error::Result; +use crate::flags::get_api_key; +use crate::flags::CODEX_RS_SSE_FIXTURE; +use crate::flags::OPENAI_API_BASE; +use crate::flags::OPENAI_REQUEST_MAX_RETRIES; +use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS; +use crate::flags::OPENAI_TIMEOUT_MS; +use crate::models::ResponseInputItem; +use crate::models::ResponseItem; +use crate::util::backoff; + +#[derive(Default, Debug, Clone)] +pub struct Prompt { + pub input: Vec, + pub prev_id: Option, + pub instructions: Option, +} + +#[derive(Debug)] +pub enum ResponseEvent { + OutputItemDone(ResponseItem), + Completed { response_id: String }, +} + +#[derive(Debug, Serialize)] +struct Payload<'a> { + model: &'a str, + #[serde(skip_serializing_if = "Option::is_none")] + instructions: Option<&'a String>, + input: &'a Vec, + tools: &'a [Tool], + tool_choice: &'static str, + parallel_tool_calls: bool, + reasoning: Option, + #[serde(skip_serializing_if = "Option::is_none")] + previous_response_id: Option, + stream: bool, +} + +#[derive(Debug, Serialize)] +struct Reasoning { + effort: &'static str, + #[serde(skip_serializing_if = "Option::is_none")] + generate_summary: Option, +} + +#[derive(Debug, Serialize)] +struct Tool { + name: &'static str, + #[serde(rename = "type")] + kind: &'static str, // "function" + description: &'static str, + strict: bool, + parameters: JsonSchema, +} + +/// Generic JSON‑Schema subset needed for our tool definitions +#[derive(Debug, Clone, Serialize)] +#[serde(tag = "type", rename_all = "lowercase")] +enum 
JsonSchema { + String, + Number, + Array { + items: Box, + }, + Object { + properties: BTreeMap, + required: &'static [&'static str], + #[serde(rename = "additionalProperties")] + additional_properties: bool, + }, +} + +/// Tool usage specification +static TOOLS: LazyLock> = LazyLock::new(|| { + let mut properties = BTreeMap::new(); + properties.insert( + "command".to_string(), + JsonSchema::Array { + items: Box::new(JsonSchema::String), + }, + ); + properties.insert("workdir".to_string(), JsonSchema::String); + properties.insert("timeout".to_string(), JsonSchema::Number); + + vec![Tool { + name: "shell", + kind: "function", + description: "Runs a shell command, and returns its output.", + strict: false, + parameters: JsonSchema::Object { + properties, + required: &["command"], + additional_properties: false, + }, + }] +}); + +#[derive(Clone)] +pub struct ModelClient { + model: String, + client: reqwest::Client, +} + +impl ModelClient { + pub fn new(model: impl ToString) -> Self { + let model = model.to_string(); + let client = reqwest::Client::new(); + Self { model, client } + } + + pub async fn stream(&mut self, prompt: &Prompt) -> Result { + if let Some(path) = &*CODEX_RS_SSE_FIXTURE { + // short circuit for tests + warn!(path, "Streaming from fixture"); + return stream_from_fixture(path).await; + } + + let payload = Payload { + model: &self.model, + instructions: prompt.instructions.as_ref(), + input: &prompt.input, + tools: &TOOLS, + tool_choice: "auto", + parallel_tool_calls: false, + reasoning: Some(Reasoning { + effort: "high", + generate_summary: None, + }), + previous_response_id: prompt.prev_id.clone(), + stream: true, + }; + + let url = format!("{}/v1/responses", *OPENAI_API_BASE); + debug!(url, "POST"); + trace!("request payload: {}", serde_json::to_string(&payload)?); + + let mut attempt = 0; + loop { + attempt += 1; + + let res = self + .client + .post(&url) + .bearer_auth(get_api_key()?) + .header("OpenAI-Beta", "responses=experimental") + .header(reqwest::header::ACCEPT, "text/event-stream") + .json(&payload) + .timeout(*OPENAI_TIMEOUT_MS) + .send() + .await; + match res { + Ok(resp) if resp.status().is_success() => { + let (tx_event, rx_event) = mpsc::channel::>(16); + + // spawn task to process SSE + let stream = resp.bytes_stream().map_err(CodexErr::Reqwest); + tokio::spawn(process_sse(stream, tx_event)); + + return Ok(ResponseStream { rx_event }); + } + Ok(res) => { + let status = res.status(); + // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx + // errors. When we bubble early with only the HTTP status the caller sees an opaque + // "unexpected status 400 Bad Request" which makes debugging nearly impossible. + // Instead, read (and include) the response text so higher layers and users see the + // exact error message (e.g. "Unknown parameter: 'input[0].metadata'"). The body is + // small and this branch only runs on error paths so the extra allocation is + // negligible. + if !(status == StatusCode::TOO_MANY_REQUESTS || status.is_server_error()) { + // Surface the error body to callers. Use `unwrap_or_default` per Clippy. + let body = (res.text().await).unwrap_or_default(); + return Err(CodexErr::UnexpectedStatus(status, body)); + } + + if attempt > *OPENAI_REQUEST_MAX_RETRIES { + return Err(CodexErr::RetryLimit(status)); + } + + // Pull out Retry‑After header if present. 
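+ // e.g. `Retry-After: 2` sleeps roughly two seconds before the next attempt;
+ // otherwise fall back to the exponential `backoff(attempt)` delay.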
+ let retry_after_secs = res + .headers() + .get(reqwest::header::RETRY_AFTER) + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.parse::().ok()); + + let delay = retry_after_secs + .map(|s| Duration::from_millis(s * 1_000)) + .unwrap_or_else(|| backoff(attempt)); + tokio::time::sleep(delay).await; + } + Err(e) => { + if attempt > *OPENAI_REQUEST_MAX_RETRIES { + return Err(e.into()); + } + let delay = backoff(attempt); + tokio::time::sleep(delay).await; + } + } + } + } +} + +#[derive(Debug, Deserialize, Serialize)] +struct SseEvent { + #[serde(rename = "type")] + kind: String, + response: Option, + item: Option, +} + +#[derive(Debug, Deserialize)] +struct ResponseCompleted { + id: String, +} + +async fn process_sse(stream: S, tx_event: mpsc::Sender>) +where + S: Stream> + Unpin, +{ + let mut stream = stream.eventsource(); + + // If the stream stays completely silent for an extended period treat it as disconnected. + let idle_timeout = *OPENAI_STREAM_IDLE_TIMEOUT_MS; + // The response id returned from the "complete" message. + let mut response_id = None; + + loop { + let sse = match timeout(idle_timeout, stream.next()).await { + Ok(Some(Ok(sse))) => sse, + Ok(Some(Err(e))) => { + debug!("SSE Error: {e:#}"); + let event = CodexErr::Stream(e.to_string()); + let _ = tx_event.send(Err(event)).await; + return; + } + Ok(None) => { + match response_id { + Some(response_id) => { + let event = ResponseEvent::Completed { response_id }; + let _ = tx_event.send(Ok(event)).await; + } + None => { + let _ = tx_event + .send(Err(CodexErr::Stream( + "stream closed before response.completed".into(), + ))) + .await; + } + } + return; + } + Err(_) => { + let _ = tx_event + .send(Err(CodexErr::Stream("idle timeout waiting for SSE".into()))) + .await; + return; + } + }; + + let event: SseEvent = match serde_json::from_str(&sse.data) { + Ok(event) => event, + Err(e) => { + debug!("Failed to parse SSE event: {e}, data: {}", &sse.data); + continue; + } + }; + + trace!(?event, "SSE event"); + match event.kind.as_str() { + // Individual output item finalised. Forward immediately so the + // rest of the agent can stream assistant text/functions *live* + // instead of waiting for the final `response.completed` envelope. + // + // IMPORTANT: We used to ignore these events and forward the + // duplicated `output` array embedded in the `response.completed` + // payload. That produced two concrete issues: + // 1. No real‑time streaming – the user only saw output after the + // entire turn had finished, which broke the “typing” UX and + // made long‑running turns look stalled. + // 2. Duplicate `function_call_output` items – both the + // individual *and* the completed array were forwarded, which + // confused the backend and triggered 400 + // "previous_response_not_found" errors because the duplicated + // IDs did not match the incremental turn chain. + // + // The fix is to forward the incremental events *as they come* and + // drop the duplicated list inside `response.completed`. 
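+ // (`response.completed` is still parsed below, but only to capture the
+ // response id used to chain the next turn via `previous_response_id`.)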
+ "response.output_item.done" => { + let Some(item_val) = event.item else { continue }; + let Ok(item) = serde_json::from_value::(item_val) else { + debug!("failed to parse ResponseItem from output_item.done"); + continue; + }; + + let event = ResponseEvent::OutputItemDone(item); + if tx_event.send(Ok(event)).await.is_err() { + return; + } + } + // Final response completed – includes array of output items & id + "response.completed" => { + if let Some(resp_val) = event.response { + match serde_json::from_value::(resp_val) { + Ok(r) => { + response_id = Some(r.id); + } + Err(e) => { + debug!("failed to parse ResponseCompleted: {e}"); + continue; + } + }; + }; + } + other => debug!(other, "sse event"), + } + } +} + +pub struct ResponseStream { + rx_event: mpsc::Receiver>, +} + +impl Stream for ResponseStream { + type Item = Result; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.rx_event.poll_recv(cx) + } +} + +/// used in tests to stream from a text SSE file +async fn stream_from_fixture(path: impl AsRef) -> Result { + let (tx_event, rx_event) = mpsc::channel::>(16); + let f = std::fs::File::open(path.as_ref())?; + let lines = std::io::BufReader::new(f).lines(); + + // insert \n\n after each line for proper SSE parsing + let mut content = String::new(); + for line in lines { + content.push_str(&line?); + content.push_str("\n\n"); + } + + let rdr = std::io::Cursor::new(content); + let stream = ReaderStream::new(rdr).map_err(CodexErr::Io); + tokio::spawn(process_sse(stream, tx_event)); + Ok(ResponseStream { rx_event }) +} diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs new file mode 100644 index 00000000..74e46678 --- /dev/null +++ b/codex-rs/core/src/codex.rs @@ -0,0 +1,1448 @@ +use std::collections::HashMap; +use std::collections::HashSet; +use std::io::Write; +use std::path::PathBuf; +use std::process::Command; +use std::process::Stdio; +use std::sync::Arc; +use std::sync::Mutex; + +use anyhow::Context; +use async_channel::Receiver; +use async_channel::Sender; +use codex_apply_patch::maybe_parse_apply_patch_verified; +use codex_apply_patch::print_summary; +use codex_apply_patch::AffectedPaths; +use codex_apply_patch::ApplyPatchFileChange; +use codex_apply_patch::MaybeApplyPatchVerified; +use expanduser::expanduser; +use fs_err as fs; +use futures::prelude::*; +use serde::Serialize; +use tokio::sync::oneshot; +use tokio::sync::Notify; +use tokio::task::AbortHandle; +use tracing::debug; +use tracing::info; +use tracing::trace; +use tracing::warn; + +use crate::client::ModelClient; +use crate::client::Prompt; +use crate::client::ResponseEvent; +use crate::error::CodexErr; +use crate::error::Result as CodexResult; +use crate::exec::process_exec_tool_call; +use crate::exec::ExecParams; +use crate::exec::ExecToolCallOutput; +use crate::exec::SandboxType; +use crate::flags::OPENAI_DEFAULT_MODEL; +use crate::flags::OPENAI_STREAM_MAX_RETRIES; +use crate::models::ContentItem; +use crate::models::FunctionCallOutputPayload; +use crate::models::ResponseInputItem; +use crate::models::ResponseItem; +use crate::protocol::AskForApproval; +use crate::protocol::Event; +use crate::protocol::EventMsg; +use crate::protocol::FileChange; +use crate::protocol::InputItem; +use crate::protocol::Op; +use crate::protocol::ReviewDecision; +use crate::protocol::SandboxPolicy; +use crate::protocol::Submission; +use crate::safety::assess_command_safety; +use crate::safety::assess_patch_safety; +use crate::safety::SafetyCheck; +use crate::util::backoff; + +/// The 
high-level interface to the Codex system. +/// It operates as a queue pair where you send submissions and receive events. +#[derive(Clone)] +pub struct Codex { + tx_sub: Sender, + rx_event: Receiver, + recorder: Recorder, +} + +impl Codex { + pub fn spawn(ctrl_c: Arc) -> CodexResult { + CodexBuilder::default().spawn(ctrl_c) + } + + pub fn builder() -> CodexBuilder { + CodexBuilder::default() + } + + pub async fn submit(&self, sub: Submission) -> CodexResult<()> { + self.recorder.record_submission(&sub); + self.tx_sub + .send(sub) + .await + .map_err(|_| CodexErr::InternalAgentDied) + } + + pub async fn next_event(&self) -> CodexResult { + let event = self + .rx_event + .recv() + .await + .map_err(|_| CodexErr::InternalAgentDied)?; + self.recorder.record_event(&event); + Ok(event) + } +} + +#[derive(Default)] +pub struct CodexBuilder { + record_submissions: Option, + record_events: Option, +} + +impl CodexBuilder { + pub fn spawn(self, ctrl_c: Arc) -> CodexResult { + let (tx_sub, rx_sub) = async_channel::bounded(64); + let (tx_event, rx_event) = async_channel::bounded(64); + let recorder = Recorder::new(&self)?; + tokio::spawn(submission_loop(rx_sub, tx_event, ctrl_c)); + Ok(Codex { + tx_sub, + rx_event, + recorder, + }) + } + + pub fn record_submissions(mut self, path: impl AsRef) -> Self { + let path = match expanduser(path.as_ref()) { + Ok(path) => path, + Err(_) => PathBuf::from(path.as_ref()), + }; + debug!("Recording submissions to {}", path.display()); + self.record_submissions = Some(path); + self + } + + pub fn record_events(mut self, path: impl AsRef) -> Self { + let path = match expanduser(path.as_ref()) { + Ok(path) => path, + Err(_) => PathBuf::from(path.as_ref()), + }; + debug!("Recording events to {}", path.display()); + self.record_events = Some(path); + self + } +} + +#[derive(Clone)] +struct Recorder { + submissions: Option>>, + events: Option>>, +} + +impl Recorder { + fn new(builder: &CodexBuilder) -> CodexResult { + let submissions = match &builder.record_submissions { + Some(path) => { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + let f = fs::File::create(path)?; + Some(Arc::new(Mutex::new(f))) + } + None => None, + }; + let events = match &builder.record_events { + Some(path) => { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + let f = fs::File::create(path)?; + Some(Arc::new(Mutex::new(f))) + } + None => None, + }; + Ok(Self { + submissions, + events, + }) + } + + pub fn record_submission(&self, sub: &Submission) { + let Some(f) = &self.submissions else { + return; + }; + let mut f = f.lock().unwrap(); + let json = serde_json::to_string(sub).expect("failed to serialize submission json"); + if let Err(e) = writeln!(f, "{json}") { + warn!("failed to record submission: {e:#}"); + } + } + + pub fn record_event(&self, event: &Event) { + let Some(f) = &self.events else { + return; + }; + let mut f = f.lock().unwrap(); + let json = serde_json::to_string(event).expect("failed to serialize event json"); + if let Err(e) = writeln!(f, "{json}") { + warn!("failed to record event: {e:#}"); + } + } +} + +/// Context for an initialized model agent +/// +/// A session has at most 1 running task at a time, and can be interrupted by user input. 
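+///
+/// The session is shared between tasks behind an `Arc`; the mutable pieces
+/// (current task, pending approvals, pending input, approved commands) are
+/// kept in the `state` mutex.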
+struct Session {
+    client: ModelClient,
+    tx_event: Sender<Event>,
+    ctrl_c: Arc<Notify>,
+
+    instructions: Option<String>,
+    approval_policy: AskForApproval,
+    sandbox_policy: SandboxPolicy,
+    writable_roots: Mutex<Vec<PathBuf>>,
+
+    state: Mutex<State>,
+}
+
+/// Mutable state of the agent
+#[derive(Default)]
+struct State {
+    approved_commands: HashSet<Vec<String>>,
+    current_task: Option<AgentTask>,
+    previous_response_id: Option<String>,
+    pending_approvals: HashMap<String, oneshot::Sender<ReviewDecision>>,
+    pending_input: Vec<ResponseInputItem>,
+}
+
+impl Session {
+    pub fn set_task(&self, task: AgentTask) {
+        let mut state = self.state.lock().unwrap();
+        if let Some(current_task) = state.current_task.take() {
+            current_task.abort();
+        }
+        state.current_task = Some(task);
+    }
+
+    pub fn remove_task(&self, sub_id: &str) {
+        let mut state = self.state.lock().unwrap();
+        if let Some(task) = &state.current_task {
+            if task.sub_id == sub_id {
+                state.current_task.take();
+            }
+        }
+    }
+
+    pub async fn request_command_approval(
+        &self,
+        sub_id: String,
+        command: Vec<String>,
+        cwd: PathBuf,
+        reason: Option<String>,
+    ) -> oneshot::Receiver<ReviewDecision> {
+        let (tx_approve, rx_approve) = oneshot::channel();
+        let event = Event {
+            id: sub_id.clone(),
+            msg: EventMsg::ExecApprovalRequest {
+                command,
+                cwd,
+                reason,
+            },
+        };
+        let _ = self.tx_event.send(event).await;
+        {
+            let mut state = self.state.lock().unwrap();
+            state.pending_approvals.insert(sub_id, tx_approve);
+        }
+        rx_approve
+    }
+
+    pub async fn request_patch_approval(
+        &self,
+        sub_id: String,
+        changes: &HashMap<PathBuf, ApplyPatchFileChange>,
+        reason: Option<String>,
+        grant_root: Option<PathBuf>,
+    ) -> oneshot::Receiver<ReviewDecision> {
+        let (tx_approve, rx_approve) = oneshot::channel();
+        let event = Event {
+            id: sub_id.clone(),
+            msg: EventMsg::ApplyPatchApprovalRequest {
+                changes: convert_apply_patch_to_protocol(changes),
+                reason,
+                grant_root,
+            },
+        };
+        let _ = self.tx_event.send(event).await;
+        {
+            let mut state = self.state.lock().unwrap();
+            state.pending_approvals.insert(sub_id, tx_approve);
+        }
+        rx_approve
+    }
+
+    pub fn notify_approval(&self, sub_id: &str, decision: ReviewDecision) {
+        let mut state = self.state.lock().unwrap();
+        if let Some(tx_approve) = state.pending_approvals.remove(sub_id) {
+            tx_approve.send(decision).ok();
+        }
+    }
+
+    pub fn add_approved_command(&self, cmd: Vec<String>) {
+        let mut state = self.state.lock().unwrap();
+        state.approved_commands.insert(cmd);
+    }
+
+    async fn notify_exec_command_begin(
+        &self,
+        sub_id: &str,
+        call_id: &str,
+        command: Vec<String>,
+        cwd: Option<String>,
+    ) {
+        let cwd = cwd
+            .or_else(|| {
+                std::env::current_dir()
+                    .ok()
+                    .map(|p| p.to_string_lossy().to_string())
+            })
+            .unwrap_or_else(|| "".to_string());
+        let event = Event {
+            id: sub_id.to_string(),
+            msg: EventMsg::ExecCommandBegin {
+                call_id: call_id.to_string(),
+                command,
+                cwd,
+            },
+        };
+        let _ = self.tx_event.send(event).await;
+    }
+
+    async fn notify_exec_command_end(
+        &self,
+        sub_id: &str,
+        call_id: &str,
+        stdout: &str,
+        stderr: &str,
+        exit_code: i32,
+    ) {
+        const MAX_STREAM_OUTPUT: usize = 5 * 1024; // 5KiB
+        let event = Event {
+            id: sub_id.to_string(),
+            // Because stdout and stderr could each be up to 100 KiB, we send
+            // truncated versions.
+            msg: EventMsg::ExecCommandEnd {
+                call_id: call_id.to_string(),
+                stdout: stdout.chars().take(MAX_STREAM_OUTPUT).collect(),
+                stderr: stderr.chars().take(MAX_STREAM_OUTPUT).collect(),
+                exit_code,
+            },
+        };
+        let _ = self.tx_event.send(event).await;
+    }
+
+    /// Helper that emits a BackgroundEvent with the given message. This keeps
+    /// the call‑sites terse so adding more diagnostics does not clutter the
+    /// core agent logic.
+ async fn notify_background_event(&self, sub_id: &str, message: impl Into) { + let event = Event { + id: sub_id.to_string(), + msg: EventMsg::BackgroundEvent { + message: message.into(), + }, + }; + let _ = self.tx_event.send(event).await; + } + + /// Returns the input if there was no task running to inject into + pub fn inject_input(&self, input: Vec) -> Result<(), Vec> { + let mut state = self.state.lock().unwrap(); + if state.current_task.is_some() { + state.pending_input.push(input.into()); + Ok(()) + } else { + Err(input) + } + } + + pub fn get_pending_input(&self) -> Vec { + let mut state = self.state.lock().unwrap(); + if state.pending_input.is_empty() { + Vec::with_capacity(0) + } else { + let mut ret = Vec::new(); + std::mem::swap(&mut ret, &mut state.pending_input); + ret + } + } + + pub fn abort(&self) { + info!("Aborting existing session"); + let mut state = self.state.lock().unwrap(); + state.pending_approvals.clear(); + state.pending_input.clear(); + if let Some(task) = state.current_task.take() { + task.abort(); + } + } +} + +impl Drop for Session { + fn drop(&mut self) { + self.abort(); + } +} + +impl State { + pub fn partial_clone(&self) -> Self { + Self { + approved_commands: self.approved_commands.clone(), + previous_response_id: self.previous_response_id.clone(), + ..Default::default() + } + } +} + +/// A series of Turns in response to user input. +struct AgentTask { + sess: Arc, + sub_id: String, + handle: AbortHandle, +} + +impl AgentTask { + fn spawn(sess: Arc, sub_id: String, input: Vec) -> Self { + let handle = + tokio::spawn(run_task(Arc::clone(&sess), sub_id.clone(), input)).abort_handle(); + Self { + sess, + sub_id, + handle, + } + } + + fn abort(self) { + if !self.handle.is_finished() { + self.handle.abort(); + let event = Event { + id: self.sub_id, + msg: EventMsg::Error { + message: "Turn interrupted".to_string(), + }, + }; + let tx_event = self.sess.tx_event.clone(); + tokio::spawn(async move { + tx_event.send(event).await.ok(); + }); + } + } +} + +async fn submission_loop( + rx_sub: Receiver, + tx_event: Sender, + ctrl_c: Arc, +) { + let mut sess: Option> = None; + // shorthand - send an event when there is no active session + let send_no_session_event = |sub_id: String| async { + let event = Event { + id: sub_id, + msg: EventMsg::Error { + message: "No session initialized, expected 'ConfigureSession' as first Op" + .to_string(), + }, + }; + tx_event.send(event).await.ok(); + }; + + loop { + let interrupted = ctrl_c.notified(); + let sub = tokio::select! 
{ + res = rx_sub.recv() => match res { + Ok(sub) => sub, + Err(_) => break, + }, + _ = interrupted => { + if let Some(sess) = sess.as_ref(){ + sess.abort(); + } + continue; + }, + }; + + debug!(?sub, "Submission"); + match sub.op { + Op::Interrupt => { + let sess = match sess.as_ref() { + Some(sess) => sess, + None => { + send_no_session_event(sub.id).await; + continue; + } + }; + sess.abort(); + } + Op::ConfigureSession { + model, + instructions, + approval_policy, + sandbox_policy, + } => { + let model = model.unwrap_or_else(|| OPENAI_DEFAULT_MODEL.to_string()); + info!(model, "Configuring session"); + let client = ModelClient::new(model.clone()); + + // abort any current running session and clone its state + let state = match sess.take() { + Some(sess) => { + sess.abort(); + sess.state.lock().unwrap().partial_clone() + } + None => State::default(), + }; + + // update session + sess = Some(Arc::new(Session { + client, + tx_event: tx_event.clone(), + ctrl_c: Arc::clone(&ctrl_c), + instructions, + approval_policy, + sandbox_policy, + writable_roots: Mutex::new(get_writable_roots()), + state: Mutex::new(state), + })); + + // ack + let event = Event { + id: sub.id, + msg: EventMsg::SessionConfigured { model }, + }; + if tx_event.send(event).await.is_err() { + return; + } + } + Op::UserInput { items } => { + let sess = match sess.as_ref() { + Some(sess) => sess, + None => { + send_no_session_event(sub.id).await; + continue; + } + }; + + // attempt to inject input into current task + if let Err(items) = sess.inject_input(items) { + // no current task, spawn a new one + let task = AgentTask::spawn(Arc::clone(sess), sub.id, items); + sess.set_task(task); + } + } + Op::ExecApproval { id, decision } => { + let sess = match sess.as_ref() { + Some(sess) => sess, + None => { + send_no_session_event(sub.id).await; + continue; + } + }; + match decision { + ReviewDecision::Abort => { + sess.abort(); + } + other => sess.notify_approval(&id, other), + } + } + Op::PatchApproval { id, decision } => { + let sess = match sess.as_ref() { + Some(sess) => sess, + None => { + send_no_session_event(sub.id).await; + continue; + } + }; + match decision { + ReviewDecision::Abort => { + sess.abort(); + } + other => sess.notify_approval(&id, other), + } + } + } + } + debug!("Agent loop exited"); +} + +async fn run_task(sess: Arc, sub_id: String, input: Vec) { + if input.is_empty() { + return; + } + let event = Event { + id: sub_id.clone(), + msg: EventMsg::TaskStarted, + }; + if sess.tx_event.send(event).await.is_err() { + return; + } + + let mut turn_input = vec![ResponseInputItem::from(input)]; + loop { + let pending_input = sess.get_pending_input(); + turn_input.splice(0..0, pending_input); + + match run_turn(&sess, sub_id.clone(), turn_input).await { + Ok(turn_output) => { + if turn_output.is_empty() { + debug!("Turn completed"); + break; + } + turn_input = turn_output; + } + Err(e) => { + info!("Turn error: {e:#}"); + let event = Event { + id: sub_id.clone(), + msg: EventMsg::Error { + message: e.to_string(), + }, + }; + sess.tx_event.send(event).await.ok(); + return; + } + } + } + sess.remove_task(&sub_id); + let event = Event { + id: sub_id, + msg: EventMsg::TaskComplete, + }; + sess.tx_event.send(event).await.ok(); +} + +async fn run_turn( + sess: &Session, + sub_id: String, + input: Vec, +) -> CodexResult> { + let prev_id = { + let state = sess.state.lock().unwrap(); + state.previous_response_id.clone() + }; + + let instructions = match prev_id { + Some(_) => None, + None => sess.instructions.clone(), + }; + 
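+    // `instructions` are only attached on the first turn of a session; later
+    // turns are chained to the previous response via `prev_id`, so the model
+    // already has them in context.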
let prompt = Prompt { + input, + prev_id, + instructions, + }; + + let mut retries = 0; + loop { + match try_run_turn(sess, &sub_id, &prompt).await { + Ok(output) => return Ok(output), + Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted), + Err(e) => { + if retries < *OPENAI_STREAM_MAX_RETRIES { + retries += 1; + let delay = backoff(retries); + warn!( + "stream disconnected - retrying turn ({retries}/{} in {delay:?})...", + *OPENAI_STREAM_MAX_RETRIES + ); + + // Surface retry information to any UI/front‑end so the + // user understands what is happening instead of staring + // at a seemingly frozen screen. + sess.notify_background_event( + &sub_id, + format!( + "stream error: {e}; retrying {retries}/{} in {:?}…", + *OPENAI_STREAM_MAX_RETRIES, delay + ), + ) + .await; + + tokio::time::sleep(delay).await; + } else { + return Err(e); + } + } + } + } +} + +async fn try_run_turn( + sess: &Session, + sub_id: &str, + prompt: &Prompt, +) -> CodexResult> { + let mut stream = sess.client.clone().stream(prompt).await?; + + // Buffer all the incoming messages from the stream first, then execute them. + // If we execute a function call in the middle of handling the stream, it can time out. + let mut input = Vec::new(); + while let Some(event) = stream.next().await { + input.push(event?); + } + + let mut output = Vec::new(); + for event in input { + match event { + ResponseEvent::OutputItemDone(item) => { + if let Some(item) = handle_response_item(sess, sub_id, item).await? { + output.push(item); + } + } + ResponseEvent::Completed { response_id } => { + let mut state = sess.state.lock().unwrap(); + state.previous_response_id = Some(response_id); + break; + } + } + } + Ok(output) +} + +async fn handle_response_item( + sess: &Session, + sub_id: &str, + item: ResponseItem, +) -> CodexResult> { + debug!(?item, "Output item"); + let mut output = None; + match item { + ResponseItem::Message { content, .. } => { + for item in content { + if let ContentItem::OutputText { text } = item { + let event = Event { + id: sub_id.to_string(), + msg: EventMsg::AgentMessage { message: text }, + }; + sess.tx_event.send(event).await.ok(); + } + } + } + ResponseItem::FunctionCall { + name, + arguments, + call_id, + } => { + output = Some( + handle_function_call(sess, sub_id.to_string(), name, arguments, call_id).await, + ); + } + ResponseItem::FunctionCallOutput { .. } => { + debug!("unexpected FunctionCallOutput from stream"); + } + ResponseItem::Other => (), + } + Ok(output) +} + +async fn handle_function_call( + sess: &Session, + sub_id: String, + name: String, + arguments: String, + call_id: String, +) -> ResponseInputItem { + match name.as_str() { + "container.exec" | "shell" => { + // parse command + let params = match serde_json::from_str::(&arguments) { + Ok(v) => v, + Err(e) => { + // allow model to re-sample + let output = ResponseInputItem::FunctionCallOutput { + call_id, + output: crate::models::FunctionCallOutputPayload { + content: format!("failed to parse function arguments: {e}"), + success: None, + }, + }; + return output; + } + }; + + // check if this was a patch, and apply it if so + match maybe_parse_apply_patch_verified(¶ms.command) { + MaybeApplyPatchVerified::Body(changes) => { + return apply_patch(sess, sub_id, call_id, changes).await; + } + MaybeApplyPatchVerified::CorrectnessError(parse_error) => { + // It looks like an invocation of `apply_patch`, but we + // could not resolve it into a patch that would apply + // cleanly. Return to model for resample. 
+ return ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!("error: {parse_error:#}"), + success: None, + }, + }; + } + MaybeApplyPatchVerified::ShellParseError(error) => { + trace!("Failed to parse shell command, {error}"); + } + MaybeApplyPatchVerified::NotApplyPatch => (), + } + + // this was not a valid patch, execute command + let repo_root = std::env::current_dir().expect("no current dir"); + let workdir: PathBuf = params + .workdir + .as_ref() + .map(PathBuf::from) + .unwrap_or(repo_root.clone()); + + // safety checks + let safety = { + let state = sess.state.lock().unwrap(); + assess_command_safety( + ¶ms.command, + sess.approval_policy, + sess.sandbox_policy, + &state.approved_commands, + ) + }; + let sandbox_type = match safety { + SafetyCheck::AutoApprove { sandbox_type } => sandbox_type, + SafetyCheck::AskUser => { + let rx_approve = sess + .request_command_approval( + sub_id.clone(), + params.command.clone(), + workdir.clone(), + None, + ) + .await; + match rx_approve.await.unwrap_or_default() { + ReviewDecision::Approved => (), + ReviewDecision::ApprovedForSession => { + sess.add_approved_command(params.command.clone()); + } + ReviewDecision::Denied | ReviewDecision::Abort => { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: crate::models::FunctionCallOutputPayload { + content: "exec command rejected by user".to_string(), + success: None, + }, + }; + } + } + // No sandboxing is applied because the user has given + // explicit approval. Often, we end up in this case because + // the command cannot be run in a sandbox, such as + // installing a new dependency that requires network access. + SandboxType::None + } + SafetyCheck::Reject { reason } => { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: crate::models::FunctionCallOutputPayload { + content: format!("exec command rejected: {reason}"), + success: None, + }, + }; + } + }; + + sess.notify_exec_command_begin( + &sub_id, + &call_id, + params.command.clone(), + params.workdir.clone(), + ) + .await; + + let roots_snapshot = { sess.writable_roots.lock().unwrap().clone() }; + + let output_result = process_exec_tool_call( + params.clone(), + sandbox_type, + &roots_snapshot, + sess.ctrl_c.clone(), + ) + .await; + + match output_result { + Ok(output) => { + let ExecToolCallOutput { + exit_code, + stdout, + stderr, + duration, + } = output; + + sess.notify_exec_command_end(&sub_id, &call_id, &stdout, &stderr, exit_code) + .await; + + let is_success = exit_code == 0; + let content = format_exec_output( + if is_success { &stdout } else { &stderr }, + exit_code, + duration, + ); + + ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content, + success: Some(is_success), + }, + } + } + Err(CodexErr::Sandbox(e)) => { + // Early out if the user never wants to be asked for approval; just return to the model immediately + if sess.approval_policy == AskForApproval::Never { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!( + "failed in sandbox {:?} with execution error: {e}", + sandbox_type + ), + success: Some(false), + }, + }; + } + + // Ask the user to retry without sandbox + sess.notify_background_event(&sub_id, format!("Execution failed: {e}")) + .await; + + let rx_approve = sess + .request_command_approval( + sub_id.clone(), + params.command.clone(), + workdir, + Some("command failed; retry without sandbox?".to_string()), + ) + 
.await; + + match rx_approve.await.unwrap_or_default() { + ReviewDecision::Approved | ReviewDecision::ApprovedForSession => { + // Persist this command as pre‑approved for the + // remainder of the session so future + // executions skip the sandbox directly. + // TODO(ragona): Isn't this a bug? It always saves the command in an | fork? + sess.add_approved_command(params.command.clone()); + // Inform UI we are retrying without sandbox. + sess.notify_background_event( + &sub_id, + "retrying command without sandbox", + ) + .await; + + // Emit a fresh Begin event so progress bars reset. + let retry_call_id = format!("{call_id}-retry"); + sess.notify_exec_command_begin( + &sub_id, + &retry_call_id, + params.command.clone(), + params.workdir.clone(), + ) + .await; + + let retry_roots = { sess.writable_roots.lock().unwrap().clone() }; + + let retry_output_result = process_exec_tool_call( + params.clone(), + SandboxType::None, + &retry_roots, + sess.ctrl_c.clone(), + ) + .await; + + match retry_output_result { + Ok(retry_output) => { + let ExecToolCallOutput { + exit_code, + stdout, + stderr, + duration, + } = retry_output; + + sess.notify_exec_command_end( + &sub_id, + &retry_call_id, + &stdout, + &stderr, + exit_code, + ) + .await; + + let is_success = exit_code == 0; + let content = format_exec_output( + if is_success { &stdout } else { &stderr }, + exit_code, + duration, + ); + + ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content, + success: Some(is_success), + }, + } + } + Err(e) => { + // Handle retry failure + ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!("retry failed: {e}"), + success: None, + }, + } + } + } + } + ReviewDecision::Denied | ReviewDecision::Abort => { + // Fall through to original failure handling. + ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: "exec command rejected by user".to_string(), + success: None, + }, + } + } + } + } + Err(e) => { + // Handle non-sandbox errors + ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!("execution error: {e}"), + success: None, + }, + } + } + } + } + _ => { + // Unknown function: reply with structured failure so the model can adapt. + ResponseInputItem::FunctionCallOutput { + call_id, + output: crate::models::FunctionCallOutputPayload { + content: format!("unsupported call: {}", name), + success: None, + }, + } + } + } +} + +async fn apply_patch( + sess: &Session, + sub_id: String, + call_id: String, + changes: HashMap, +) -> ResponseInputItem { + let writable_roots_snapshot = { + let guard = sess.writable_roots.lock().unwrap(); + guard.clone() + }; + + let auto_approved = + match assess_patch_safety(&changes, sess.approval_policy, &writable_roots_snapshot) { + SafetyCheck::AutoApprove { .. } => true, + SafetyCheck::AskUser => { + // Compute a readable summary of path changes to include in the + // approval request so the user can make an informed decision. 
+ let rx_approve = sess + .request_patch_approval(sub_id.clone(), &changes, None, None) + .await; + match rx_approve.await.unwrap_or_default() { + ReviewDecision::Approved | ReviewDecision::ApprovedForSession => false, + ReviewDecision::Denied | ReviewDecision::Abort => { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: "patch rejected by user".to_string(), + success: Some(false), + }, + }; + } + } + } + SafetyCheck::Reject { reason } => { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!("patch rejected: {reason}"), + success: Some(false), + }, + }; + } + }; + + // Verify write permissions before touching the filesystem. + let writable_snapshot = { sess.writable_roots.lock().unwrap().clone() }; + + if let Some(offending) = first_offending_path(&changes, &writable_snapshot) { + let root = offending.parent().unwrap_or(&offending).to_path_buf(); + + let reason = Some(format!( + "grant write access to {} for this session", + root.display() + )); + + let rx = sess + .request_patch_approval(sub_id.clone(), &changes, reason.clone(), Some(root.clone())) + .await; + + if !matches!( + rx.await.unwrap_or_default(), + ReviewDecision::Approved | ReviewDecision::ApprovedForSession + ) { + return ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: "patch rejected by user".to_string(), + success: Some(false), + }, + }; + } + + // user approved, extend writable roots for this session + sess.writable_roots.lock().unwrap().push(root); + } + + let _ = sess + .tx_event + .send(Event { + id: sub_id.clone(), + msg: EventMsg::PatchApplyBegin { + call_id: call_id.clone(), + auto_approved, + changes: convert_apply_patch_to_protocol(&changes), + }, + }) + .await; + + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + // Enforce writable roots. If a write is blocked, collect offending root + // and prompt the user to extend permissions. + let mut result = apply_changes_from_apply_patch_and_report(&changes, &mut stdout, &mut stderr); + + if let Err(err) = &result { + if err.kind() == std::io::ErrorKind::PermissionDenied { + // Determine first offending path. + let offending_opt = changes.iter().find_map(|(path, change)| { + let path_ref = match change { + ApplyPatchFileChange::Add { .. } => path, + ApplyPatchFileChange::Delete => path, + ApplyPatchFileChange::Update { .. } => path, + }; + + // Reuse safety normalisation logic: treat absolute path. + let abs = if path_ref.is_absolute() { + path_ref.clone() + } else { + std::env::current_dir().unwrap_or_default().join(path_ref) + }; + + let writable = { + let roots = sess.writable_roots.lock().unwrap(); + roots.iter().any(|root| abs.starts_with(root)) + }; + if writable { + None + } else { + Some(path_ref.clone()) + } + }); + + if let Some(offending) = offending_opt { + let root = offending.parent().unwrap_or(&offending).to_path_buf(); + + let reason = Some(format!( + "grant write access to {} for this session", + root.display() + )); + let rx = sess + .request_patch_approval( + sub_id.clone(), + &changes, + reason.clone(), + Some(root.clone()), + ) + .await; + if matches!( + rx.await.unwrap_or_default(), + ReviewDecision::Approved | ReviewDecision::ApprovedForSession + ) { + // Extend writable roots. 
+ sess.writable_roots.lock().unwrap().push(root); + stdout.clear(); + stderr.clear(); + result = apply_changes_from_apply_patch_and_report( + &changes, + &mut stdout, + &mut stderr, + ); + } + } + } + } + + // Emit PatchApplyEnd event. + let success_flag = result.is_ok(); + let _ = sess + .tx_event + .send(Event { + id: sub_id.clone(), + msg: EventMsg::PatchApplyEnd { + call_id: call_id.clone(), + stdout: String::from_utf8_lossy(&stdout).to_string(), + stderr: String::from_utf8_lossy(&stderr).to_string(), + success: success_flag, + }, + }) + .await; + + match result { + Ok(_) => ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: String::from_utf8_lossy(&stdout).to_string(), + success: None, + }, + }, + Err(e) => ResponseInputItem::FunctionCallOutput { + call_id, + output: FunctionCallOutputPayload { + content: format!("error: {e:#}, stderr: {}", String::from_utf8_lossy(&stderr)), + success: Some(false), + }, + }, + } +} + +/// Return the first path in `hunks` that is NOT under any of the +/// `writable_roots` (after normalising). If all paths are acceptable, +/// returns None. +fn first_offending_path( + changes: &HashMap, + writable_roots: &[PathBuf], +) -> Option { + let cwd = std::env::current_dir().unwrap_or_default(); + + for (path, change) in changes { + let candidate = match change { + ApplyPatchFileChange::Add { .. } => path, + ApplyPatchFileChange::Delete => path, + ApplyPatchFileChange::Update { move_path, .. } => move_path.as_ref().unwrap_or(path), + }; + + let abs = if candidate.is_absolute() { + candidate.clone() + } else { + cwd.join(candidate) + }; + + let mut allowed = false; + for root in writable_roots { + let root_abs = if root.is_absolute() { + root.clone() + } else { + cwd.join(root) + }; + if abs.starts_with(&root_abs) { + allowed = true; + break; + } + } + + if !allowed { + return Some(candidate.clone()); + } + } + None +} + +fn convert_apply_patch_to_protocol( + changes: &HashMap, +) -> HashMap { + let mut result = HashMap::with_capacity(changes.len()); + for (path, change) in changes { + let protocol_change = match change { + ApplyPatchFileChange::Add { content } => FileChange::Add { + content: content.clone(), + }, + ApplyPatchFileChange::Delete => FileChange::Delete, + ApplyPatchFileChange::Update { + unified_diff, + move_path, + } => FileChange::Update { + unified_diff: unified_diff.clone(), + move_path: move_path.clone(), + }, + }; + result.insert(path.clone(), protocol_change); + } + result +} + +fn apply_changes_from_apply_patch_and_report( + changes: &HashMap, + stdout: &mut impl std::io::Write, + stderr: &mut impl std::io::Write, +) -> std::io::Result<()> { + match apply_changes_from_apply_patch(changes) { + Ok(affected_paths) => { + print_summary(&affected_paths, stdout)?; + } + Err(err) => { + writeln!(stderr, "{err:?}")?; + } + } + + Ok(()) +} + +fn apply_changes_from_apply_patch( + changes: &HashMap, +) -> anyhow::Result { + let mut added: Vec = Vec::new(); + let mut modified: Vec = Vec::new(); + let mut deleted: Vec = Vec::new(); + + for (path, change) in changes { + match change { + ApplyPatchFileChange::Add { content } => { + if let Some(parent) = path.parent() { + if !parent.as_os_str().is_empty() { + std::fs::create_dir_all(parent).with_context(|| { + format!("Failed to create parent directories for {}", path.display()) + })?; + } + } + std::fs::write(path, content) + .with_context(|| format!("Failed to write file {}", path.display()))?; + added.push(path.clone()); + } + ApplyPatchFileChange::Delete 
=> { + std::fs::remove_file(path) + .with_context(|| format!("Failed to delete file {}", path.display()))?; + deleted.push(path.clone()); + } + ApplyPatchFileChange::Update { + unified_diff, + move_path, + } => { + // TODO(mbolin): `patch` is not guaranteed to be available. + // Allegedly macOS provides it, but minimal Linux installs + // might omit it. + Command::new("patch") + .arg(path) + .arg("-p0") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .stdin(Stdio::piped()) + .spawn() + .and_then(|mut child| { + let mut stdin = child.stdin.take().unwrap(); + stdin.write_all(unified_diff.as_bytes())?; + stdin.flush()?; + // Drop stdin to send EOF. + drop(stdin); + child.wait() + }) + .with_context(|| format!("Failed to apply patch to {}", path.display()))?; + if let Some(move_path) = move_path { + if let Some(parent) = move_path.parent() { + if !parent.as_os_str().is_empty() { + std::fs::create_dir_all(parent).with_context(|| { + format!( + "Failed to create parent directories for {}", + move_path.display() + ) + })?; + } + } + std::fs::rename(path, move_path) + .with_context(|| format!("Failed to rename file {}", path.display()))?; + modified.push(move_path.clone()); + deleted.push(path.clone()); + } else { + modified.push(path.clone()); + } + } + } + } + + Ok(AffectedPaths { + added, + modified, + deleted, + }) +} + +fn get_writable_roots() -> Vec { + let mut writable_roots = Vec::new(); + if cfg!(target_os = "macos") { + // On macOS, $TMPDIR is private to the user. + writable_roots.push(std::env::temp_dir()); + + // Allow pyenv to update its shims directory. Without this, any tool + // that happens to be managed by `pyenv` will fail with an error like: + // + // pyenv: cannot rehash: $HOME/.pyenv/shims isn't writable + // + // which is emitted every time `pyenv` tries to run `rehash` (for + // example, after installing a new Python package that drops an entry + // point). Although the sandbox is intentionally read‑only by default, + // writing to the user's local `pyenv` directory is safe because it + // is already user‑writable and scoped to the current user account. 
+ if let Ok(home_dir) = std::env::var("HOME") { + let pyenv_dir = PathBuf::from(home_dir).join(".pyenv"); + writable_roots.push(pyenv_dir); + } + } + + if let Ok(cwd) = std::env::current_dir() { + writable_roots.push(cwd); + } + + writable_roots +} + +/// Exec output is a pre-serialized JSON payload +fn format_exec_output(output: &str, exit_code: i32, duration: std::time::Duration) -> String { + #[derive(Serialize)] + struct ExecMetadata { + exit_code: i32, + duration_seconds: f32, + } + + #[derive(Serialize)] + struct ExecOutput<'a> { + output: &'a str, + metadata: ExecMetadata, + } + + // round to 1 decimal place + let duration_seconds = ((duration.as_secs_f32()) * 10.0).round() / 10.0; + + let payload = ExecOutput { + output, + metadata: ExecMetadata { + exit_code, + duration_seconds, + }, + }; + + serde_json::to_string(&payload).expect("serialize ExecOutput") +} diff --git a/codex-rs/core/src/codex_wrapper.rs b/codex-rs/core/src/codex_wrapper.rs new file mode 100644 index 00000000..426b5373 --- /dev/null +++ b/codex-rs/core/src/codex_wrapper.rs @@ -0,0 +1,85 @@ +use std::sync::atomic::AtomicU64; +use std::sync::Arc; + +use crate::config::Config; +use crate::protocol::AskForApproval; +use crate::protocol::Event; +use crate::protocol::EventMsg; +use crate::protocol::Op; +use crate::protocol::SandboxPolicy; +use crate::protocol::Submission; +use crate::util::notify_on_sigint; +use crate::Codex; +use tokio::sync::Notify; +use tracing::debug; + +/// Spawn a new [`Codex`] and initialise the session. +/// +/// Returns the wrapped [`Codex`] **and** the `SessionInitialized` event that +/// is received as a response to the initial `ConfigureSession` submission so +/// that callers can surface the information to the UI. +pub async fn init_codex( + approval_policy: AskForApproval, + sandbox_policy: SandboxPolicy, + model_override: Option, +) -> anyhow::Result<(CodexWrapper, Event, Arc)> { + let ctrl_c = notify_on_sigint(); + let config = Config::load().unwrap_or_default(); + debug!("loaded config: {config:?}"); + let codex = CodexWrapper::new(Codex::spawn(ctrl_c.clone())?); + let init_id = codex + .submit(Op::ConfigureSession { + model: model_override.or_else(|| config.model.clone()), + instructions: config.instructions, + approval_policy, + sandbox_policy, + }) + .await?; + + // The first event must be `SessionInitialized`. Validate and forward it to + // the caller so that they can display it in the conversation history. + let event = codex.next_event().await?; + if event.id != init_id + || !matches!( + &event, + Event { + id: _id, + msg: EventMsg::SessionConfigured { .. }, + } + ) + { + return Err(anyhow::anyhow!( + "expected SessionInitialized but got {event:?}" + )); + } + + Ok((codex, event, ctrl_c)) +} + +pub struct CodexWrapper { + next_id: AtomicU64, + codex: Codex, +} + +impl CodexWrapper { + fn new(codex: Codex) -> Self { + Self { + next_id: AtomicU64::new(0), + codex, + } + } + + /// Returns the id of the Submission. 
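+    /// A minimal usage sketch (illustrative only; assumes `wrapper` was
+    /// obtained from [`init_codex`]):
+    ///
+    /// ```ignore
+    /// let id = wrapper.submit(Op::Interrupt).await?;
+    /// let event = wrapper.next_event().await?;
+    /// ```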
+ pub async fn submit(&self, op: Op) -> crate::error::Result { + let id = self + .next_id + .fetch_add(1, std::sync::atomic::Ordering::SeqCst) + .to_string(); + self.codex.submit(Submission { id: id.clone(), op }).await?; + Ok(id) + } + + pub async fn next_event(&self) -> crate::error::Result { + self.codex.next_event().await + } +} diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs new file mode 100644 index 00000000..c094de54 --- /dev/null +++ b/codex-rs/core/src/config.rs @@ -0,0 +1,42 @@ +use dirs::home_dir; +use serde::Deserialize; + +/// Embedded fallback instructions that mirror the TypeScript CLI’s default system prompt. These +/// are compiled into the binary so a clean install behaves correctly even if the user has not +/// created `~/.codex/instructions.md`. +const EMBEDDED_INSTRUCTIONS: &str = include_str!("../prompt.md"); + +#[derive(Default, Deserialize, Debug, Clone)] +pub struct Config { + pub model: Option, + pub instructions: Option, +} + +impl Config { + /// Load ~/.codex/config.toml and ~/.codex/instructions.md (if present). + /// Returns `None` if neither file exists. + pub fn load() -> Option { + let mut cfg: Config = Self::load_from_toml().unwrap_or_default(); + + // Highest precedence → user‑provided ~/.codex/instructions.md (if present) + // Fallback → embedded default instructions baked into the binary + + cfg.instructions = + Self::load_instructions().or_else(|| Some(EMBEDDED_INSTRUCTIONS.to_string())); + + Some(cfg) + } + + fn load_from_toml() -> Option { + let mut p = home_dir()?; + p.push(".codex/config.toml"); + let contents = std::fs::read_to_string(&p).ok()?; + toml::from_str(&contents).ok() + } + + fn load_instructions() -> Option { + let mut p = home_dir()?; + p.push(".codex/instructions.md"); + std::fs::read_to_string(&p).ok() + } +} diff --git a/codex-rs/core/src/error.rs b/codex-rs/core/src/error.rs new file mode 100644 index 00000000..c6929ddf --- /dev/null +++ b/codex-rs/core/src/error.rs @@ -0,0 +1,103 @@ +use reqwest::StatusCode; +use serde_json; +use std::io; +use thiserror::Error; +use tokio::task::JoinError; + +pub type Result = std::result::Result; + +#[derive(Error, Debug)] +pub enum SandboxErr { + /// Error from sandbox execution + #[error("sandbox denied exec error, exit code: {0}, stdout: {1}, stderr: {2}")] + Denied(i32, String, String), + + /// Error from linux seccomp filter setup + #[cfg(target_os = "linux")] + #[error("seccomp setup error")] + SeccompInstall(#[from] seccompiler::Error), + + /// Error from linux seccomp backend + #[cfg(target_os = "linux")] + #[error("seccomp backend error")] + SeccompBackend(#[from] seccompiler::BackendError), + + /// Error from linux landlock + #[error("Landlock was not able to fully enforce all sandbox rules")] + LandlockRestrict, +} + +#[derive(Error, Debug)] +pub enum CodexErr { + /// Returned by ResponsesClient when the SSE stream disconnects or errors out **after** the HTTP + /// handshake has succeeded but **before** it finished emitting `response.completed`. + /// + /// The Session loop treats this as a transient error and will automatically retry the turn. + #[error("stream disconnected before completion: {0}")] + Stream(String), + + /// Returned by run_command_stream when the spawned child process timed out (10s). + #[error("timeout waiting for child process to exit")] + Timeout, + + /// Returned by run_command_stream when the child could not be spawned (its stdout/stderr pipes + /// could not be captured). Analogous to the previous `CodexError::Spawn` variant. 
+ #[error("spawn failed: child stdout/stderr not captured")] + Spawn, + + /// Returned by run_command_stream when the user pressed Ctrl‑C (SIGINT). Session uses this to + /// surface a polite FunctionCallOutput back to the model instead of crashing the CLI. + #[error("interrupted (Ctrl‑C)")] + Interrupted, + + /// Unexpected HTTP status code. + #[error("unexpected status {0}: {1}")] + UnexpectedStatus(StatusCode, String), + + /// Retry limit exceeded. + #[error("exceeded retry limit, last status: {0}")] + RetryLimit(StatusCode), + + /// Agent loop died unexpectedly + #[error("internal error; agent loop died unexpectedly")] + InternalAgentDied, + + /// Sandbox error + #[error("sandbox error: {0}")] + Sandbox(#[from] SandboxErr), + + // ----------------------------------------------------------------- + // Automatic conversions for common external error types + // ----------------------------------------------------------------- + #[error(transparent)] + Io(#[from] io::Error), + + #[error(transparent)] + Reqwest(#[from] reqwest::Error), + + #[error(transparent)] + Json(#[from] serde_json::Error), + + #[cfg(target_os = "linux")] + #[error(transparent)] + LandlockRuleset(#[from] landlock::RulesetError), + + #[cfg(target_os = "linux")] + #[error(transparent)] + LandlockPathFd(#[from] landlock::PathFdError), + + #[error(transparent)] + TokioJoin(#[from] JoinError), + + #[error("missing environment variable {0}")] + EnvVar(&'static str), +} + +impl CodexErr { + /// Minimal shim so that existing `e.downcast_ref::()` checks continue to compile + /// after replacing `anyhow::Error` in the return signature. This mirrors the behavior of + /// `anyhow::Error::downcast_ref` but works directly on our concrete enum. + pub fn downcast_ref(&self) -> Option<&T> { + (self as &dyn std::any::Any).downcast_ref::() + } +} diff --git a/codex-rs/core/src/exec.rs b/codex-rs/core/src/exec.rs new file mode 100644 index 00000000..1a92c8ad --- /dev/null +++ b/codex-rs/core/src/exec.rs @@ -0,0 +1,277 @@ +use std::io; +use std::path::PathBuf; +use std::process::ExitStatus; +use std::process::Stdio; +use std::sync::Arc; +use std::time::Duration; +use std::time::Instant; + +use serde::Deserialize; +use tokio::io::AsyncReadExt; +use tokio::io::BufReader; +use tokio::process::Command; +use tokio::sync::Notify; + +use crate::error::CodexErr; +use crate::error::Result; +use crate::error::SandboxErr; + +/// Maximum we keep for each stream (100 KiB). +const MAX_STREAM_OUTPUT: usize = 100 * 1024; + +const DEFAULT_TIMEOUT_MS: u64 = 10_000; + +/// Hardcode this since it does not seem worth including the libc craate just +/// for this. +const SIGKILL_CODE: i32 = 9; + +const MACOS_SEATBELT_READONLY_POLICY: &str = include_str!("seatbelt_readonly_policy.sbpl"); + +#[derive(Deserialize, Debug, Clone)] +pub struct ExecParams { + pub command: Vec, + pub workdir: Option, + + /// This is the maximum time in seconds that the command is allowed to run. + #[serde(rename = "timeout")] + // The wire format uses `timeout`, which has ambiguous units, so we use + // `timeout_ms` as the field name so it is clear in code. + pub timeout_ms: Option, +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum SandboxType { + None, + + /// Only available on macOS. + MacosSeatbelt, + + /// Only available on Linux. 
+ LinuxSeccomp, +} + +#[cfg(target_os = "linux")] +async fn exec_linux( + params: ExecParams, + writable_roots: &[PathBuf], + ctrl_c: Arc, +) -> Result { + crate::linux::exec_linux(params, writable_roots, ctrl_c).await +} + +#[cfg(not(target_os = "linux"))] +async fn exec_linux( + _params: ExecParams, + _writable_roots: &[PathBuf], + _ctrl_c: Arc, +) -> Result { + Err(CodexErr::Io(io::Error::new( + io::ErrorKind::InvalidInput, + "linux sandbox is not supported on this platform", + ))) +} + +pub async fn process_exec_tool_call( + params: ExecParams, + sandbox_type: SandboxType, + writable_roots: &[PathBuf], + ctrl_c: Arc, +) -> Result { + let start = Instant::now(); + + let raw_output_result = match sandbox_type { + SandboxType::None => exec(params, ctrl_c).await, + SandboxType::MacosSeatbelt => { + let ExecParams { + command, + workdir, + timeout_ms, + } = params; + let seatbelt_command = create_seatbelt_command(command, writable_roots); + exec( + ExecParams { + command: seatbelt_command, + workdir, + timeout_ms, + }, + ctrl_c, + ) + .await + } + SandboxType::LinuxSeccomp => exec_linux(params, writable_roots, ctrl_c).await, + }; + let duration = start.elapsed(); + match raw_output_result { + Ok(raw_output) => { + let exit_code = raw_output.exit_status.code().unwrap_or(-1); + let stdout = String::from_utf8_lossy(&raw_output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&raw_output.stderr).to_string(); + + // NOTE(ragona): This is much less restrictive than the previous check. If we exec + // a command, and it returns anything other than success, we assume that it may have + // been a sandboxing error and allow the user to retry. (The user of course may choose + // not to retry, or in a non-interactive mode, would automatically reject the approval.) 
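+            // For example, a command that needs network access will normally
+            // exit non‑zero under the seccomp network filter; surfacing that
+            // as a sandbox error lets the user retry it without the sandbox.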
+ if exit_code != 0 && sandbox_type != SandboxType::None { + return Err(CodexErr::Sandbox(SandboxErr::Denied( + exit_code, stdout, stderr, + ))); + } + + Ok(ExecToolCallOutput { + exit_code, + stdout, + stderr, + duration, + }) + } + Err(err) => { + tracing::error!("exec error: {err}"); + Err(err) + } + } +} + +pub fn create_seatbelt_command(command: Vec, writable_roots: &[PathBuf]) -> Vec { + let (policies, cli_args): (Vec, Vec) = writable_roots + .iter() + .enumerate() + .map(|(index, root)| { + let param_name = format!("WRITABLE_ROOT_{index}"); + let policy: String = format!("(subpath (param \"{param_name}\"))"); + let cli_arg = format!("-D{param_name}={}", root.to_string_lossy()); + (policy, cli_arg) + }) + .unzip(); + + let full_policy = if policies.is_empty() { + MACOS_SEATBELT_READONLY_POLICY.to_string() + } else { + let scoped_write_policy = format!("(allow file-write*\n{}\n)", policies.join(" ")); + format!("{MACOS_SEATBELT_READONLY_POLICY}\n{scoped_write_policy}") + }; + + let mut seatbelt_command: Vec = vec![ + "sandbox-exec".to_string(), + "-p".to_string(), + full_policy.to_string(), + ]; + seatbelt_command.extend(cli_args); + seatbelt_command.push("--".to_string()); + seatbelt_command.extend(command); + seatbelt_command +} + +#[derive(Debug)] +pub struct RawExecToolCallOutput { + pub exit_status: ExitStatus, + pub stdout: Vec, + pub stderr: Vec, +} + +#[derive(Debug)] +pub struct ExecToolCallOutput { + pub exit_code: i32, + pub stdout: String, + pub stderr: String, + pub duration: Duration, +} + +pub async fn exec( + ExecParams { + command, + workdir, + timeout_ms, + }: ExecParams, + ctrl_c: Arc, +) -> Result { + let mut child = { + if command.is_empty() { + return Err(CodexErr::Io(io::Error::new( + io::ErrorKind::InvalidInput, + "command args are empty", + ))); + } + + let mut cmd = Command::new(&command[0]); + if command.len() > 1 { + cmd.args(&command[1..]); + } + if let Some(dir) = &workdir { + cmd.current_dir(dir); + } + cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); + cmd.kill_on_drop(true); + cmd.spawn()? + }; + + let stdout_handle = tokio::spawn(read_capped( + BufReader::new(child.stdout.take().expect("stdout is not piped")), + MAX_STREAM_OUTPUT, + )); + let stderr_handle = tokio::spawn(read_capped( + BufReader::new(child.stderr.take().expect("stderr is not piped")), + MAX_STREAM_OUTPUT, + )); + + let interrupted = ctrl_c.notified(); + let timeout = Duration::from_millis(timeout_ms.unwrap_or(DEFAULT_TIMEOUT_MS)); + let exit_status = tokio::select! { + result = tokio::time::timeout(timeout, child.wait()) => { + match result { + Ok(Ok(exit_status)) => exit_status, + Ok(e) => e?, + Err(_) => { + // timeout + child.start_kill()?; + // Debatable whether `child.wait().await` should be called here. 
+ synthetic_exit_status(128 + SIGKILL_CODE) + } + } + } + _ = interrupted => { + child.start_kill()?; + synthetic_exit_status(128 + SIGKILL_CODE) + } + }; + + let stdout = stdout_handle.await??; + let stderr = stderr_handle.await??; + + Ok(RawExecToolCallOutput { + exit_status, + stdout, + stderr, + }) +} + +async fn read_capped( + mut reader: R, + max_output: usize, +) -> io::Result> { + let mut buf = Vec::with_capacity(max_output.min(8 * 1024)); + let mut tmp = [0u8; 8192]; + + loop { + let n = reader.read(&mut tmp).await?; + if n == 0 { + break; + } + if buf.len() < max_output { + let remaining = max_output - buf.len(); + buf.extend_from_slice(&tmp[..remaining.min(n)]); + } + } + Ok(buf) +} + +#[cfg(unix)] +fn synthetic_exit_status(code: i32) -> ExitStatus { + use std::os::unix::process::ExitStatusExt; + std::process::ExitStatus::from_raw(code) +} + +#[cfg(windows)] +fn synthetic_exit_status(code: u32) -> ExitStatus { + use std::os::windows::process::ExitStatusExt; + std::process::ExitStatus::from_raw(code) +} diff --git a/codex-rs/core/src/flags.rs b/codex-rs/core/src/flags.rs new file mode 100644 index 00000000..41572f1a --- /dev/null +++ b/codex-rs/core/src/flags.rs @@ -0,0 +1,30 @@ +use std::time::Duration; + +use env_flags::env_flags; + +use crate::error::CodexErr; +use crate::error::Result; + +env_flags! { + pub OPENAI_DEFAULT_MODEL: &str = "o3"; + pub OPENAI_API_BASE: &str = "https://api.openai.com"; + pub OPENAI_API_KEY: Option<&str> = None; + pub OPENAI_TIMEOUT_MS: Duration = Duration::from_millis(30_000), |value| { + value.parse().map(Duration::from_millis) + }; + pub OPENAI_REQUEST_MAX_RETRIES: u64 = 4; + pub OPENAI_STREAM_MAX_RETRIES: u64 = 10; + + /// Maximum idle time (no SSE events received) before the stream is treated as + /// disconnected and retried by the agent. The default of 75 s is slightly + /// above OpenAI’s documented 60 s load‑balancer timeout. + pub OPENAI_STREAM_IDLE_TIMEOUT_MS: Duration = Duration::from_millis(75_000), |value| { + value.parse().map(Duration::from_millis) + }; + + pub CODEX_RS_SSE_FIXTURE: Option<&str> = None; +} + +pub fn get_api_key() -> Result<&'static str> { + OPENAI_API_KEY.ok_or_else(|| CodexErr::EnvVar("OPENAI_API_KEY")) +} diff --git a/codex-rs/core/src/is_safe_command.rs b/codex-rs/core/src/is_safe_command.rs new file mode 100644 index 00000000..b4d8f8c0 --- /dev/null +++ b/codex-rs/core/src/is_safe_command.rs @@ -0,0 +1,332 @@ +use tree_sitter::Parser; +use tree_sitter::Tree; +use tree_sitter_bash::LANGUAGE as BASH; + +pub fn is_known_safe_command(command: &[String]) -> bool { + if is_safe_to_call_with_exec(command) { + return true; + } + + // TODO(mbolin): Also support safe commands that are piped together such + // as `cat foo | wc -l`. + matches!( + command, + [bash, flag, script] + if bash == "bash" + && flag == "-lc" + && try_parse_bash(script).and_then(|tree| + try_parse_single_word_only_command(&tree, script)).is_some_and(|parsed_bash_command| is_safe_to_call_with_exec(&parsed_bash_command)) + ) +} + +fn is_safe_to_call_with_exec(command: &[String]) -> bool { + let cmd0 = command.first().map(String::as_str); + + match cmd0 { + Some( + "cat" | "cd" | "echo" | "grep" | "head" | "ls" | "pwd" | "rg" | "tail" | "wc" | "which", + ) => true, + + Some("find") => { + // Certain options to `find` can delete files, write to files, or + // execute arbitrary commands, so we cannot auto-approve the + // invocation of `find` in such cases. 
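+            // For example, `find . -name '*.log' -delete` or
+            // `find . -exec rm {} \;` must not be auto‑approved.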
+ #[rustfmt::skip] + const UNSAFE_FIND_OPTIONS: &[&str] = &[ + // Options that can execute arbitrary commands. + "-exec", "-execdir", "-ok", "-okdir", + // Option that deletes matching files. + "-delete", + // Options that write pathnames to a file. + "-fls", "-fprint", "-fprint0", "-fprintf", + ]; + + !command + .iter() + .any(|arg| UNSAFE_FIND_OPTIONS.contains(&arg.as_str())) + } + + // Git + Some("git") => matches!( + command.get(1).map(String::as_str), + Some("branch" | "status" | "log" | "diff" | "show") + ), + + // Rust + Some("cargo") if command.get(1).map(String::as_str) == Some("check") => true, + + // Special-case `sed -n {N|M,N}p FILE` + Some("sed") + if { + command.len() == 4 + && command.get(1).map(String::as_str) == Some("-n") + && is_valid_sed_n_arg(command.get(2).map(String::as_str)) + && command.get(3).map(String::is_empty) == Some(false) + } => + { + true + } + + // ── anything else ───────────────────────────────────────────────── + _ => false, + } +} + +fn try_parse_bash(bash_lc_arg: &str) -> Option { + let lang = BASH.into(); + let mut parser = Parser::new(); + parser.set_language(&lang).expect("load bash grammar"); + + let old_tree: Option<&Tree> = None; + parser.parse(bash_lc_arg, old_tree) +} + +/// If `tree` represents a single Bash command whose name and every argument is +/// an ordinary `word`, return those words in order; otherwise, return `None`. +/// +/// `src` must be the exact source string that was parsed into `tree`, so we can +/// extract the text for every node. +pub fn try_parse_single_word_only_command(tree: &Tree, src: &str) -> Option> { + // Any parse error is an immediate rejection. + if tree.root_node().has_error() { + return None; + } + + // (program …) with exactly one statement + let root = tree.root_node(); + if root.kind() != "program" || root.named_child_count() != 1 { + return None; + } + + let cmd = root.named_child(0)?; // (command …) + if cmd.kind() != "command" { + return None; + } + + let mut words = Vec::new(); + let mut cursor = cmd.walk(); + + for child in cmd.named_children(&mut cursor) { + match child.kind() { + // The command name node wraps one `word` child. + "command_name" => { + let word_node = child.named_child(0)?; // make sure it's only a word + if word_node.kind() != "word" { + return None; + } + words.push(word_node.utf8_text(src.as_bytes()).ok()?.to_owned()); + } + // Positional‑argument word (allowed). + "word" | "number" => { + words.push(child.utf8_text(src.as_bytes()).ok()?.to_owned()); + } + "string" => { + if child.child_count() == 3 + && child.child(0)?.kind() == "\"" + && child.child(1)?.kind() == "string_content" + && child.child(2)?.kind() == "\"" + { + words.push(child.child(1)?.utf8_text(src.as_bytes()).ok()?.to_owned()); + } else { + // Anything else means the command is *not* plain words. + return None; + } + } + "concatenation" => { + // TODO: Consider things like `'ab\'a'`. + return None; + } + "raw_string" => { + // Raw string is a single word, but we need to strip the quotes. + let raw_string = child.utf8_text(src.as_bytes()).ok()?; + let stripped = raw_string + .strip_prefix('\'') + .and_then(|s| s.strip_suffix('\'')); + if let Some(stripped) = stripped { + words.push(stripped.to_owned()); + } else { + return None; + } + } + // Anything else means the command is *not* plain words. 
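+            // (e.g. `$VAR` expansions or `$(…)` command substitution fall
+            // through to here)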
+ _ => return None, + } + } + + Some(words) +} + +/* ---------------------------------------------------------- +Example +---------------------------------------------------------- */ + +/// Returns true if `arg` matches /^(\d+,)?\d+p$/ +fn is_valid_sed_n_arg(arg: Option<&str>) -> bool { + // unwrap or bail + let s = match arg { + Some(s) => s, + None => return false, + }; + + // must end with 'p', strip it + let core = match s.strip_suffix('p') { + Some(rest) => rest, + None => return false, + }; + + // split on ',' and ensure 1 or 2 numeric parts + let parts: Vec<&str> = core.split(',').collect(); + match parts.as_slice() { + // single number, e.g. "10" + [num] => !num.is_empty() && num.chars().all(|c| c.is_ascii_digit()), + + // two numbers, e.g. "1,5" + [a, b] => { + !a.is_empty() + && !b.is_empty() + && a.chars().all(|c| c.is_ascii_digit()) + && b.chars().all(|c| c.is_ascii_digit()) + } + + // anything else (more than one comma) is invalid + _ => false, + } +} +#[cfg(test)] +mod tests { + use super::*; + + fn vec_str(args: &[&str]) -> Vec { + args.iter().map(|s| s.to_string()).collect() + } + + #[test] + fn known_safe_examples() { + assert!(is_safe_to_call_with_exec(&vec_str(&["ls"]))); + assert!(is_safe_to_call_with_exec(&vec_str(&["git", "status"]))); + assert!(is_safe_to_call_with_exec(&vec_str(&[ + "sed", "-n", "1,5p", "file.txt" + ]))); + + // Safe `find` command (no unsafe options). + assert!(is_safe_to_call_with_exec(&vec_str(&[ + "find", ".", "-name", "file.txt" + ]))); + } + + #[test] + fn unknown_or_partial() { + assert!(!is_safe_to_call_with_exec(&vec_str(&["foo"]))); + assert!(!is_safe_to_call_with_exec(&vec_str(&["git", "fetch"]))); + assert!(!is_safe_to_call_with_exec(&vec_str(&[ + "sed", "-n", "xp", "file.txt" + ]))); + + // Unsafe `find` commands. + for args in [ + vec_str(&["find", ".", "-name", "file.txt", "-exec", "rm", "{}", ";"]), + vec_str(&[ + "find", ".", "-name", "*.py", "-execdir", "python3", "{}", ";", + ]), + vec_str(&["find", ".", "-name", "file.txt", "-ok", "rm", "{}", ";"]), + vec_str(&["find", ".", "-name", "*.py", "-okdir", "python3", "{}", ";"]), + vec_str(&["find", ".", "-delete", "-name", "file.txt"]), + vec_str(&["find", ".", "-fls", "/etc/passwd"]), + vec_str(&["find", ".", "-fprint", "/etc/passwd"]), + vec_str(&["find", ".", "-fprint0", "/etc/passwd"]), + vec_str(&["find", ".", "-fprintf", "/root/suid.txt", "%#m %u %p\n"]), + ] { + assert!( + !is_safe_to_call_with_exec(&args), + "expected {:?} to be unsafe", + args + ); + } + } + + #[test] + fn bash_lc_safe_examples() { + assert!(is_known_safe_command(&vec_str(&["bash", "-lc", "ls"]))); + assert!(is_known_safe_command(&vec_str(&["bash", "-lc", "ls -1"]))); + assert!(is_known_safe_command(&vec_str(&[ + "bash", + "-lc", + "git status" + ]))); + assert!(is_known_safe_command(&vec_str(&[ + "bash", + "-lc", + "grep -R \"Cargo.toml\" -n" + ]))); + assert!(is_known_safe_command(&vec_str(&[ + "bash", + "-lc", + "sed -n 1,5p file.txt" + ]))); + assert!(is_known_safe_command(&vec_str(&[ + "bash", + "-lc", + "sed -n '1,5p' file.txt" + ]))); + + assert!(is_known_safe_command(&vec_str(&[ + "bash", + "-lc", + "find . -name file.txt" + ]))); + } + + #[test] + fn bash_lc_unsafe_examples() { + assert!( + !is_known_safe_command(&vec_str(&["bash", "-lc", "git", "status"])), + "Four arg version is not known to be safe." + ); + assert!( + !is_known_safe_command(&vec_str(&["bash", "-lc", "'git status'"])), + "The extra quoting around 'git status' makes it a program named 'git status' and is therefore unsafe." 
+ ); + + assert!( + !is_known_safe_command(&vec_str(&["bash", "-lc", "find . -name file.txt -delete"])), + "Unsafe find option should not be auto‑approved." + ); + } + + #[test] + fn test_try_parse_single_word_only_command() { + let script_with_single_quoted_string = "sed -n '1,5p' file.txt"; + let parsed_words = try_parse_bash(script_with_single_quoted_string) + .and_then(|tree| { + try_parse_single_word_only_command(&tree, script_with_single_quoted_string) + }) + .unwrap(); + assert_eq!( + vec![ + "sed".to_string(), + "-n".to_string(), + // Ensure the single quotes are properly removed. + "1,5p".to_string(), + "file.txt".to_string() + ], + parsed_words, + ); + + let script_with_number_arg = "ls -1"; + let parsed_words = try_parse_bash(script_with_number_arg) + .and_then(|tree| try_parse_single_word_only_command(&tree, script_with_number_arg)) + .unwrap(); + assert_eq!(vec!["ls", "-1"], parsed_words,); + + let script_with_double_quoted_string_with_no_funny_stuff_arg = "grep -R \"Cargo.toml\" -n"; + let parsed_words = try_parse_bash(script_with_double_quoted_string_with_no_funny_stuff_arg) + .and_then(|tree| { + try_parse_single_word_only_command( + &tree, + script_with_double_quoted_string_with_no_funny_stuff_arg, + ) + }) + .unwrap(); + assert_eq!(vec!["grep", "-R", "Cargo.toml", "-n"], parsed_words); + } +} diff --git a/codex-rs/core/src/lib.rs b/codex-rs/core/src/lib.rs new file mode 100644 index 00000000..7d330915 --- /dev/null +++ b/codex-rs/core/src/lib.rs @@ -0,0 +1,30 @@ +//! Root of the `codex-core` library. + +// Prevent accidental direct writes to stdout/stderr in library code. All +// user‑visible output must go through the appropriate abstraction (e.g., +// the TUI or the tracing stack). +#![deny(clippy::print_stdout, clippy::print_stderr)] + +mod client; +pub mod codex; +pub mod codex_wrapper; +pub mod config; +pub mod error; +pub mod exec; +mod flags; +mod is_safe_command; +#[cfg(target_os = "linux")] +mod linux; +mod models; +pub mod protocol; +mod safety; +pub mod util; + +pub use codex::Codex; + +#[cfg(feature = "cli")] +mod approval_mode_cli_arg; +#[cfg(feature = "cli")] +pub use approval_mode_cli_arg::ApprovalModeCliArg; +#[cfg(feature = "cli")] +pub use approval_mode_cli_arg::SandboxModeCliArg; diff --git a/codex-rs/core/src/linux.rs b/codex-rs/core/src/linux.rs new file mode 100644 index 00000000..f2dd9e6b --- /dev/null +++ b/codex-rs/core/src/linux.rs @@ -0,0 +1,320 @@ +use std::collections::BTreeMap; +use std::io; +use std::path::PathBuf; +use std::sync::Arc; + +use crate::error::CodexErr; +use crate::error::Result; +use crate::error::SandboxErr; +use crate::exec::exec; +use crate::exec::ExecParams; +use crate::exec::RawExecToolCallOutput; + +use landlock::Access; +use landlock::AccessFs; +use landlock::CompatLevel; +use landlock::Compatible; +use landlock::Ruleset; +use landlock::RulesetAttr; +use landlock::RulesetCreatedAttr; +use landlock::ABI; +use seccompiler::apply_filter; +use seccompiler::BpfProgram; +use seccompiler::SeccompAction; +use seccompiler::SeccompCmpArgLen; +use seccompiler::SeccompCmpOp; +use seccompiler::SeccompCondition; +use seccompiler::SeccompFilter; +use seccompiler::SeccompRule; +use seccompiler::TargetArch; +use tokio::sync::Notify; + +pub async fn exec_linux( + params: ExecParams, + writable_roots: &[PathBuf], + ctrl_c: Arc, +) -> Result { + // Allow READ on / + // Allow WRITE on /dev/null + let ctrl_c_copy = ctrl_c.clone(); + let writable_roots_copy = writable_roots.to_vec(); + + // Isolate thread to run the sandbox from + let 
tool_call_output = std::thread::spawn(move || { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("Failed to create runtime"); + + rt.block_on(async { + let abi = ABI::V5; + let access_rw = AccessFs::from_all(abi); + let access_ro = AccessFs::from_read(abi); + + let mut ruleset = Ruleset::default() + .set_compatibility(CompatLevel::BestEffort) + .handle_access(access_rw)? + .create()? + .add_rules(landlock::path_beneath_rules(&["/"], access_ro))? + .add_rules(landlock::path_beneath_rules(&["/dev/null"], access_rw))? + .set_no_new_privs(true); + + if !writable_roots_copy.is_empty() { + ruleset = ruleset.add_rules(landlock::path_beneath_rules( + &writable_roots_copy, + access_rw, + ))?; + } + + let status = ruleset.restrict_self()?; + + // TODO(wpt): Probably wanna expand this more generically and not warn every time. + if status.ruleset == landlock::RulesetStatus::NotEnforced { + return Err(CodexErr::Sandbox(SandboxErr::LandlockRestrict)); + } + + if let Err(e) = install_network_seccomp_filter() { + return Err(CodexErr::Sandbox(e)); + } + + exec(params, ctrl_c_copy).await + }) + }) + .join(); + + match tool_call_output { + Ok(Ok(output)) => Ok(output), + Ok(Err(e)) => Err(e), + Err(e) => Err(CodexErr::Io(io::Error::new( + io::ErrorKind::Other, + format!("thread join failed: {e:?}"), + ))), + } +} + +fn install_network_seccomp_filter() -> std::result::Result<(), SandboxErr> { + // Build rule map. + let mut rules: BTreeMap<i64, Vec<SeccompRule>> = BTreeMap::new(); + + // Helper – insert unconditional deny rule for syscall number. + let mut deny_syscall = |nr: i64| { + rules.insert(nr, vec![]); // empty rule vec = unconditional match + }; + + deny_syscall(libc::SYS_connect); + deny_syscall(libc::SYS_accept); + deny_syscall(libc::SYS_accept4); + deny_syscall(libc::SYS_bind); + deny_syscall(libc::SYS_listen); + deny_syscall(libc::SYS_getpeername); + deny_syscall(libc::SYS_getsockname); + deny_syscall(libc::SYS_shutdown); + deny_syscall(libc::SYS_sendto); + deny_syscall(libc::SYS_sendmsg); + deny_syscall(libc::SYS_sendmmsg); + deny_syscall(libc::SYS_recvfrom); + deny_syscall(libc::SYS_recvmsg); + deny_syscall(libc::SYS_recvmmsg); + deny_syscall(libc::SYS_getsockopt); + deny_syscall(libc::SYS_setsockopt); + deny_syscall(libc::SYS_ptrace); + + // For `socket` we allow AF_UNIX (arg0 == AF_UNIX) and deny everything else. + let unix_only_rule = SeccompRule::new(vec![SeccompCondition::new( + 0, // first argument (domain) + SeccompCmpArgLen::Dword, + SeccompCmpOp::Eq, + libc::AF_UNIX as u64, + )?])?; + + rules.insert(libc::SYS_socket, vec![unix_only_rule]); + rules.insert(libc::SYS_socketpair, vec![]); // always deny (Unix can use socketpair but fine, keep open?)
+ + let filter = SeccompFilter::new( + rules, + SeccompAction::Allow, // default – allow + SeccompAction::Errno(libc::EPERM as u32), // when rule matches – return EPERM + if cfg!(target_arch = "x86_64") { + TargetArch::x86_64 + } else if cfg!(target_arch = "aarch64") { + TargetArch::aarch64 + } else { + unimplemented!("unsupported architecture for seccomp filter"); + }, + )?; + + let prog: BpfProgram = filter.try_into()?; + + apply_filter(&prog)?; + + Ok(()) +} + +#[cfg(test)] +mod tests_linux { + use super::*; + use crate::exec::process_exec_tool_call; + use crate::exec::ExecParams; + use crate::exec::SandboxType; + use std::sync::Arc; + use tempfile::NamedTempFile; + use tokio::sync::Notify; + + #[allow(clippy::print_stdout)] + async fn run_cmd(cmd: &[&str], writable_roots: &[PathBuf]) { + let params = ExecParams { + command: cmd.iter().map(|elm| elm.to_string()).collect(), + workdir: None, + timeout_ms: Some(200), + }; + let res = process_exec_tool_call( + params, + SandboxType::LinuxSeccomp, + writable_roots, + Arc::new(Notify::new()), + ) + .await + .unwrap(); + + if res.exit_code != 0 { + println!("stdout:\n{}", res.stdout); + println!("stderr:\n{}", res.stderr); + panic!("exit code: {}", res.exit_code); + } + } + + #[tokio::test] + async fn test_root_read() { + run_cmd(&["ls", "-l", "/bin"], &[]).await; + } + + #[tokio::test] + #[should_panic] + async fn test_root_write() { + let tmpfile = NamedTempFile::new().unwrap(); + let tmpfile_path = tmpfile.path().to_string_lossy(); + run_cmd( + &["bash", "-lc", &format!("echo blah > {}", tmpfile_path)], + &[], + ) + .await; + } + + #[tokio::test] + async fn test_dev_null_write() { + run_cmd(&["echo", "blah", ">", "/dev/null"], &[]).await; + } + + #[tokio::test] + async fn test_writable_root() { + let tmpdir = tempfile::tempdir().unwrap(); + let file_path = tmpdir.path().join("test"); + run_cmd( + &[ + "bash", + "-lc", + &format!("echo blah > {}", file_path.to_string_lossy()), + ], + &[tmpdir.path().to_path_buf()], + ) + .await; + } + + /// Helper that runs `cmd` under the Linux sandbox and asserts that the command + /// does NOT succeed (i.e. returns a non‑zero exit code) **unless** the binary + /// is missing in which case we silently treat it as an accepted skip so the + /// suite remains green on leaner CI images. + async fn assert_network_blocked(cmd: &[&str]) { + let params = ExecParams { + command: cmd.iter().map(|s| s.to_string()).collect(), + workdir: None, + // Give the tool a generous 2‑second timeout so even slow DNS timeouts + // do not stall the suite. + timeout_ms: Some(2_000), + }; + + let result = process_exec_tool_call( + params, + SandboxType::LinuxSeccomp, + &[], + Arc::new(Notify::new()), + ) + .await; + + let (exit_code, stdout, stderr) = match result { + Ok(output) => (output.exit_code, output.stdout, output.stderr), + Err(CodexErr::Sandbox(SandboxErr::Denied(exit_code, stdout, stderr))) => { + (exit_code, stdout, stderr) + } + _ => { + panic!("expected sandbox denied error, got: {:?}", result); + } + }; + + dbg!(&stderr); + dbg!(&stdout); + dbg!(&exit_code); + + // A completely missing binary exits with 127. Anything else should also + // be non‑zero (EPERM from seccomp will usually bubble up as 1, 2, 13…) + // If—*and only if*—the command exits 0 we consider the sandbox breached. 
+ + if exit_code == 0 { + panic!( + "Network sandbox FAILED - {:?} exited 0\nstdout:\n{}\nstderr:\n{}", + cmd, stdout, stderr + ); + } + } + + #[tokio::test] + async fn sandbox_blocks_curl() { + assert_network_blocked(&["curl", "-I", "http://openai.com"]).await; + } + + #[cfg(target_os = "linux")] + #[tokio::test] + async fn sandbox_blocks_wget() { + assert_network_blocked(&["wget", "-qO-", "http://openai.com"]).await; + } + + #[tokio::test] + async fn sandbox_blocks_ping() { + // ICMP requires raw socket – should be denied quickly with EPERM. + assert_network_blocked(&["ping", "-c", "1", "8.8.8.8"]).await; + } + + #[tokio::test] + async fn sandbox_blocks_nc() { + // Zero‑length connection attempt to localhost. + assert_network_blocked(&["nc", "-z", "127.0.0.1", "80"]).await; + } + + #[tokio::test] + async fn sandbox_blocks_ssh() { + // Force ssh to attempt a real TCP connection but fail quickly. `BatchMode` + // avoids password prompts, and `ConnectTimeout` keeps the hang time low. + assert_network_blocked(&[ + "ssh", + "-o", + "BatchMode=yes", + "-o", + "ConnectTimeout=1", + "github.com", + ]) + .await; + } + + #[tokio::test] + async fn sandbox_blocks_getent() { + assert_network_blocked(&["getent", "ahosts", "openai.com"]).await; + } + + #[tokio::test] + async fn sandbox_blocks_dev_tcp_redirection() { + // This syntax is only supported by bash and zsh. We try bash first. + // Fallback generic socket attempt using /bin/sh with bash‑style /dev/tcp. Not + // all images ship bash, so we guard against 127 as well. + assert_network_blocked(&["bash", "-c", "echo hi > /dev/tcp/127.0.0.1/80"]).await; + } +} diff --git a/codex-rs/core/src/models.rs b/codex-rs/core/src/models.rs new file mode 100644 index 00000000..551ac318 --- /dev/null +++ b/codex-rs/core/src/models.rs @@ -0,0 +1,175 @@ +use base64::Engine; +use serde::ser::Serializer; +use serde::Deserialize; +use serde::Serialize; + +use crate::protocol::InputItem; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseInputItem { + Message { + role: String, + content: Vec<ContentItem>, + }, + FunctionCallOutput { + call_id: String, + output: FunctionCallOutputPayload, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ContentItem { + InputText { text: String }, + InputImage { image_url: String }, + OutputText { text: String }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseItem { + Message { + role: String, + content: Vec<ContentItem>, + }, + FunctionCall { + name: String, + // The Responses API returns the function call arguments as a *string* that contains + // JSON, not as an already‑parsed object. We keep it as a raw string here and let + // Session::handle_function_call parse it into a Value. This exactly matches the + // Chat Completions + Responses API behavior. + arguments: String, + call_id: String, + }, + // NOTE: The input schema for `function_call_output` objects that clients send to the + // OpenAI /v1/responses endpoint is NOT the same shape as the objects the server returns on the + // SSE stream. When *sending* we must wrap the string output inside an object that includes a + // required `success` boolean. The upstream TypeScript CLI does this implicitly. To ensure we + // serialize exactly the expected shape we introduce a dedicated payload struct and flatten it + // here.
+ FunctionCallOutput { + call_id: String, + output: FunctionCallOutputPayload, + }, + #[serde(other)] + Other, +} + +impl From<Vec<InputItem>> for ResponseInputItem { + fn from(items: Vec<InputItem>) -> Self { + Self::Message { + role: "user".to_string(), + content: items + .into_iter() + .filter_map(|c| match c { + InputItem::Text { text } => Some(ContentItem::InputText { text }), + InputItem::Image { image_url } => Some(ContentItem::InputImage { image_url }), + InputItem::LocalImage { path } => match std::fs::read(&path) { + Ok(bytes) => { + let mime = mime_guess::from_path(&path) + .first() + .map(|m| m.essence_str().to_owned()) + .unwrap_or_else(|| "application/octet-stream".to_string()); + let encoded = base64::engine::general_purpose::STANDARD.encode(bytes); + Some(ContentItem::InputImage { + image_url: format!("data:{};base64,{}", mime, encoded), + }) + } + Err(err) => { + tracing::warn!( + "Skipping image {} – could not read file: {}", + path.display(), + err + ); + None + } + }, + }) + .collect::<Vec<ContentItem>>(), + } + } +} + +#[expect(dead_code)] +#[derive(Deserialize, Debug, Clone)] +pub struct FunctionCallOutputPayload { + pub content: String, + pub success: Option<bool>, +} + +// The Responses API expects two *different* shapes depending on success vs failure: +// • success → output is a plain string (no nested object) +// • failure → output is an object { content, success:false } +// The upstream TypeScript CLI implements this by special‑casing the serialize path. +// We replicate that behavior with a manual Serialize impl. + +impl Serialize for FunctionCallOutputPayload { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + // The upstream TypeScript CLI always serializes `output` as a *plain string* regardless + // of whether the function call succeeded or failed. The boolean is purely informational + // for local bookkeeping and is NOT sent to the OpenAI endpoint. Sending the nested object + // form `{ content, success:false }` triggers the 400 we are still seeing. Mirror the JS CLI + // exactly: always emit a bare string. + + serializer.serialize_str(&self.content) + } +} + +// Implement Display so callers can treat the payload like a plain string when logging or doing +// trivial substring checks in tests (existing tests call `.contains()` on the output). Display +// returns the raw `content` field.
+ + impl std::fmt::Display for FunctionCallOutputPayload { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.content) + } +} + +impl std::ops::Deref for FunctionCallOutputPayload { + type Target = str; + fn deref(&self) -> &Self::Target { + &self.content + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn serializes_success_as_plain_string() { + let item = ResponseInputItem::FunctionCallOutput { + call_id: "call1".into(), + output: FunctionCallOutputPayload { + content: "ok".into(), + success: None, + }, + }; + + let json = serde_json::to_string(&item).unwrap(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + // Success case -> output should be a plain string + assert_eq!(v.get("output").unwrap().as_str().unwrap(), "ok"); + } + + #[test] + fn serializes_failure_as_string() { + let item = ResponseInputItem::FunctionCallOutput { + call_id: "call1".into(), + output: FunctionCallOutputPayload { + content: "bad".into(), + success: Some(false), + }, + }; + + let json = serde_json::to_string(&item).unwrap(); + let v: serde_json::Value = serde_json::from_str(&json).unwrap(); + + assert_eq!(v.get("output").unwrap().as_str().unwrap(), "bad"); + } +} diff --git a/codex-rs/core/src/protocol.rs b/codex-rs/core/src/protocol.rs new file mode 100644 index 00000000..d1975ae8 --- /dev/null +++ b/codex-rs/core/src/protocol.rs @@ -0,0 +1,275 @@ +//! Defines the protocol for a Codex session between a client and an agent. +//! +//! Uses a SQ (Submission Queue) / EQ (Event Queue) pattern to asynchronously communicate +//! between user and agent. + +use std::collections::HashMap; +use std::path::PathBuf; + +use serde::Deserialize; +use serde::Serialize; + +/// Submission Queue Entry - requests from user +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Submission { + /// Unique id for this Submission to correlate with Events + pub id: String, + /// Payload + pub op: Op, +} + +/// Submission operation +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +#[non_exhaustive] +pub enum Op { + /// Configure the model session. + ConfigureSession { + /// If not specified, server will use its default model. + model: Option<String>, + /// Model instructions + instructions: Option<String>, + /// When to escalate for approval for execution + approval_policy: AskForApproval, + /// How to sandbox commands executed in the system + sandbox_policy: SandboxPolicy, + }, + + /// Abort current task. + /// This server sends no corresponding Event + Interrupt, + + /// Input from the user + UserInput { + /// User input items, see `InputItem` + items: Vec<InputItem>, + }, + + /// Approve a command execution + ExecApproval { + /// The id of the submission we are approving + id: String, + /// The user's decision in response to the request. + decision: ReviewDecision, + }, + + /// Approve a code patch + PatchApproval { + /// The id of the submission we are approving + id: String, + /// The user's decision in response to the request. + decision: ReviewDecision, + }, +} + +/// Determines how liberally commands are auto‑approved by the system. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum AskForApproval { + /// Under this policy, only “known safe” commands—as determined by + /// `is_safe_command()`—that **only read files** are auto‑approved. + /// Everything else will ask the user to approve.
+ UnlessAllowListed, + + /// In addition to everything allowed by **`Suggest`**, commands that + /// *write* to files **within the user’s approved list of writable paths** + /// are also auto‑approved. + /// TODO(ragona): fix + AutoEdit, + + /// *All* commands are auto‑approved, but they are expected to run inside a + /// sandbox where network access is disabled and writes are confined to a + /// specific set of paths. If the command fails, it will be escalated to + /// the user to approve execution without a sandbox. + OnFailure, + + /// Never ask the user to approve commands. Failures are immediately returned + /// to the model, and never escalated to the user for approval. + Never, +} + +/// Determines execution restrictions for model shell commands +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum SandboxPolicy { + /// Network syscalls will be blocked + NetworkRestricted, + /// Filesystem writes will be restricted + FileWriteRestricted, + /// Network and filesystem writes will be restricted + NetworkAndFileWriteRestricted, + /// No restrictions; full "unsandboxed" mode + DangerousNoRestrictions, +} + +/// User input +#[non_exhaustive] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum InputItem { + Text { + text: String, + }, + /// Pre‑encoded data: URI image. + Image { + image_url: String, + }, + + /// Local image path provided by the user. This will be converted to an + /// `Image` variant (base64 data URL) during request serialization. + LocalImage { + path: std::path::PathBuf, + }, +} + +/// Event Queue Entry - events from agent +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Event { + /// Submission `id` that this event is correlated with. + pub id: String, + /// Payload + pub msg: EventMsg, +} + +/// Response event from the agent +#[non_exhaustive] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum EventMsg { + /// Error while executing a submission + Error { + message: String, + }, + + /// Agent has started a task + TaskStarted, + + /// Agent has completed all actions + TaskComplete, + + /// Agent text output message + AgentMessage { + message: String, + }, + + /// Ack the client's configure message. + SessionConfigured { + /// Tell the client what model is being queried. + model: String, + }, + + /// Notification that the server is about to execute a command. + ExecCommandBegin { + /// Identifier so this can be paired with the ExecCommandEnd event. + call_id: String, + /// The command to be executed. + command: Vec<String>, + /// The command's working directory if not the default cwd for the + /// agent. + cwd: String, + }, + + ExecCommandEnd { + /// Identifier for the ExecCommandBegin that finished. + call_id: String, + /// Captured stdout + stdout: String, + /// Captured stderr + stderr: String, + /// The command's exit code. + exit_code: i32, + }, + + ExecApprovalRequest { + /// The command to be executed. + command: Vec<String>, + /// The command's working directory. + cwd: PathBuf, + /// Optional human‑readable reason for the approval (e.g. retry without + /// sandbox). + #[serde(skip_serializing_if = "Option::is_none")] + reason: Option<String>, + }, + + ApplyPatchApprovalRequest { + changes: HashMap<PathBuf, FileChange>, + /// Optional explanatory reason (e.g. request for extra write access).
+ #[serde(skip_serializing_if = "Option::is_none")] + reason: Option<String>, + + /// When set, the agent is asking the user to allow writes under this + /// root for the remainder of the session. + #[serde(skip_serializing_if = "Option::is_none")] + grant_root: Option<PathBuf>, + }, + + BackgroundEvent { + message: String, + }, + + /// Notification that the agent is about to apply a code patch. Mirrors + /// `ExecCommandBegin` so front‑ends can show progress indicators. + PatchApplyBegin { + /// Identifier so this can be paired with the PatchApplyEnd event. + call_id: String, + + /// If true, there was no ApplyPatchApprovalRequest for this patch. + auto_approved: bool, + + /// The changes to be applied. + changes: HashMap<PathBuf, FileChange>, + }, + + /// Notification that a patch application has finished. + PatchApplyEnd { + /// Identifier for the PatchApplyBegin that finished. + call_id: String, + /// Captured stdout (summary printed by apply_patch). + stdout: String, + /// Captured stderr (parser errors, IO failures, etc.). + stderr: String, + /// Whether the patch was applied successfully. + success: bool, + }, +} + +/// User's decision in response to an ExecApprovalRequest. +#[derive(Debug, Default, Clone, Copy, Deserialize, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum ReviewDecision { + /// User has approved this command and the agent should execute it. + Approved, + + /// User has approved this command and wants to automatically approve any + /// future identical instances (`command` and `cwd` match exactly) for the + /// remainder of the session. + ApprovedForSession, + + /// User has denied this command and the agent should not execute it, but + /// it should continue the session and try something else. + #[default] + Denied, + + /// User has denied this command and the agent should not do anything until + /// the user's next command. + Abort, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum FileChange { + Add { + content: String, + }, + Delete, + Update { + unified_diff: String, + move_path: Option<PathBuf>, + }, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Chunk { + /// 1-based line index of the first line in the original file + pub orig_index: u32, + pub deleted_lines: Vec<String>, + pub inserted_lines: Vec<String>, +} diff --git a/codex-rs/core/src/safety.rs b/codex-rs/core/src/safety.rs new file mode 100644 index 00000000..e7841b2a --- /dev/null +++ b/codex-rs/core/src/safety.rs @@ -0,0 +1,236 @@ +use std::collections::HashMap; +use std::collections::HashSet; +use std::path::Component; +use std::path::Path; +use std::path::PathBuf; + +use codex_apply_patch::ApplyPatchFileChange; + +use crate::exec::SandboxType; +use crate::is_safe_command::is_known_safe_command; +use crate::protocol::AskForApproval; +use crate::protocol::SandboxPolicy; + +#[derive(Debug)] +pub enum SafetyCheck { + AutoApprove { sandbox_type: SandboxType }, + AskUser, + Reject { reason: String }, +} + +pub fn assess_patch_safety( + changes: &HashMap<PathBuf, ApplyPatchFileChange>, + policy: AskForApproval, + writable_roots: &[PathBuf], +) -> SafetyCheck { + if changes.is_empty() { + return SafetyCheck::Reject { + reason: "empty patch".to_string(), + }; + } + + match policy { + AskForApproval::OnFailure | AskForApproval::AutoEdit | AskForApproval::Never => { + // Continue to see if this can be auto-approved. + } + // TODO(ragona): I'm not sure this is actually correct? I believe in this case + // we want to continue to the writable paths check before asking the user.
+ AskForApproval::UnlessAllowListed => { + return SafetyCheck::AskUser; + } + } + + if is_write_patch_constrained_to_writable_paths(changes, writable_roots) { + SafetyCheck::AutoApprove { + sandbox_type: SandboxType::None, + } + } else if policy == AskForApproval::OnFailure { + // Only auto‑approve when we can actually enforce a sandbox. Otherwise + // fall back to asking the user because the patch may touch arbitrary + // paths outside the project. + match get_platform_sandbox() { + Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type }, + None => SafetyCheck::AskUser, + } + } else if policy == AskForApproval::Never { + SafetyCheck::Reject { + reason: "writing outside of the project; rejected by user approval settings" + .to_string(), + } + } else { + SafetyCheck::AskUser + } +} + +pub fn assess_command_safety( + command: &[String], + approval_policy: AskForApproval, + sandbox_policy: SandboxPolicy, + approved: &HashSet<Vec<String>>, +) -> SafetyCheck { + let approve_without_sandbox = || SafetyCheck::AutoApprove { + sandbox_type: SandboxType::None, + }; + + // Previously approved or allow-listed commands + // All approval modes allow these commands to continue without sandboxing + if is_known_safe_command(command) || approved.contains(command) { + // TODO(ragona): I think we should consider running even these inside the sandbox, but it's + // a change in behavior so I'm keeping it at parity with upstream for now. + return approve_without_sandbox(); + } + + // Command was not known-safe or allow-listed + match sandbox_policy { + // Only the dangerous sandbox policy will run arbitrary commands outside a sandbox + SandboxPolicy::DangerousNoRestrictions => approve_without_sandbox(), + // All other policies try to run the command in a sandbox if it is available + _ => match get_platform_sandbox() { + // We have a sandbox, so we can approve the command in all modes + Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type }, + None => { + // We do not have a sandbox, so we need to consider the approval policy + match approval_policy { + // Never is our "non-interactive" mode; it must automatically reject + AskForApproval::Never => SafetyCheck::Reject { + reason: "auto-rejected by user approval settings".to_string(), + }, + // Otherwise, we ask the user for approval + _ => SafetyCheck::AskUser, + } + } + }, + } +} + +pub fn get_platform_sandbox() -> Option<SandboxType> { + if cfg!(target_os = "macos") { + Some(SandboxType::MacosSeatbelt) + } else if cfg!(target_os = "linux") { + Some(SandboxType::LinuxSeccomp) + } else { + None + } +} + +fn is_write_patch_constrained_to_writable_paths( + changes: &HashMap<PathBuf, ApplyPatchFileChange>, + writable_roots: &[PathBuf], +) -> bool { + // Early‑exit if there are no declared writable roots. + if writable_roots.is_empty() { + return false; + } + + // Normalize a path by removing `.` and resolving `..` without touching the + // filesystem (works even if the file does not exist). + fn normalize(path: &Path) -> Option<PathBuf> { + let mut out = PathBuf::new(); + for comp in path.components() { + match comp { + Component::ParentDir => { + out.pop(); + } + Component::CurDir => { /* skip */ } + other => out.push(other.as_os_str()), + } + } + Some(out) + } + + // Determine whether `path` is inside **any** writable root. Both `path` + // and roots are converted to absolute, normalized forms before the + // prefix check.
+ let is_path_writable = |p: &PathBuf| { + let cwd = match std::env::current_dir() { + Ok(cwd) => cwd, + Err(_) => return false, + }; + + let abs = if p.is_absolute() { + p.clone() + } else { + cwd.join(p) + }; + let abs = match normalize(&abs) { + Some(v) => v, + None => return false, + }; + + writable_roots.iter().any(|root| { + let root_abs = if root.is_absolute() { + root.clone() + } else { + normalize(&cwd.join(root)).unwrap_or_else(|| cwd.join(root)) + }; + + abs.starts_with(&root_abs) + }) + }; + + for (path, change) in changes { + match change { + ApplyPatchFileChange::Add { .. } | ApplyPatchFileChange::Delete => { + if !is_path_writable(path) { + return false; + } + } + ApplyPatchFileChange::Update { move_path, .. } => { + if !is_path_writable(path) { + return false; + } + if let Some(dest) = move_path { + if !is_path_writable(dest) { + return false; + } + } + } + } + } + + true +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_writable_roots_constraint() { + let cwd = std::env::current_dir().unwrap(); + let parent = cwd.parent().unwrap().to_path_buf(); + + // Helper to build a single‑entry map representing a patch that adds a + // file at `p`. + let make_add_change = |p: PathBuf| { + let mut m = HashMap::new(); + m.insert( + p.clone(), + ApplyPatchFileChange::Add { + content: String::new(), + }, + ); + m + }; + + let add_inside = make_add_change(PathBuf::from("inner.txt")); + let add_outside = make_add_change(parent.join("outside.txt")); + + assert!(is_write_patch_constrained_to_writable_paths( + &add_inside, + &[PathBuf::from(".")] + )); + + let add_outside_2 = make_add_change(parent.join("outside.txt")); + assert!(!is_write_patch_constrained_to_writable_paths( + &add_outside_2, + &[PathBuf::from(".")] + )); + + // With parent dir added as writable root, it should pass. + assert!(is_write_patch_constrained_to_writable_paths( + &add_outside, + &[PathBuf::from("..")] + )) + } +} diff --git a/codex-rs/core/src/seatbelt_readonly_policy.sbpl b/codex-rs/core/src/seatbelt_readonly_policy.sbpl new file mode 100644 index 00000000..c0632658 --- /dev/null +++ b/codex-rs/core/src/seatbelt_readonly_policy.sbpl @@ -0,0 +1,70 @@ +(version 1) + +; inspired by Chrome's sandbox policy: +; https://source.chromium.org/chromium/chromium/src/+/main:sandbox/policy/mac/common.sb;l=273-319;drc=7b3962fe2e5fc9e2ee58000dc8fbf3429d84d3bd + +; start with closed-by-default +(deny default) + +; allow read-only file operations +(allow file-read*) + +; child processes inherit the policy of their parent +(allow process-exec) +(allow process-fork) +(allow signal (target self)) + +(allow file-write-data + (require-all + (path "/dev/null") + (vnode-type CHARACTER-DEVICE))) + +; sysctls permitted. 
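+; Only read access to the specific hw.* / kern.* values listed below is granted; all other sysctls, and any sysctl writes, remain blocked by the (deny default) rule above.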
+(allow sysctl-read + (sysctl-name "hw.activecpu") + (sysctl-name "hw.busfrequency_compat") + (sysctl-name "hw.byteorder") + (sysctl-name "hw.cacheconfig") + (sysctl-name "hw.cachelinesize_compat") + (sysctl-name "hw.cpufamily") + (sysctl-name "hw.cpufrequency_compat") + (sysctl-name "hw.cputype") + (sysctl-name "hw.l1dcachesize_compat") + (sysctl-name "hw.l1icachesize_compat") + (sysctl-name "hw.l2cachesize_compat") + (sysctl-name "hw.l3cachesize_compat") + (sysctl-name "hw.logicalcpu_max") + (sysctl-name "hw.machine") + (sysctl-name "hw.ncpu") + (sysctl-name "hw.nperflevels") + (sysctl-name "hw.optional.arm.FEAT_BF16") + (sysctl-name "hw.optional.arm.FEAT_DotProd") + (sysctl-name "hw.optional.arm.FEAT_FCMA") + (sysctl-name "hw.optional.arm.FEAT_FHM") + (sysctl-name "hw.optional.arm.FEAT_FP16") + (sysctl-name "hw.optional.arm.FEAT_I8MM") + (sysctl-name "hw.optional.arm.FEAT_JSCVT") + (sysctl-name "hw.optional.arm.FEAT_LSE") + (sysctl-name "hw.optional.arm.FEAT_RDM") + (sysctl-name "hw.optional.arm.FEAT_SHA512") + (sysctl-name "hw.optional.armv8_2_sha512") + (sysctl-name "hw.memsize") + (sysctl-name "hw.pagesize") + (sysctl-name "hw.packages") + (sysctl-name "hw.pagesize_compat") + (sysctl-name "hw.physicalcpu_max") + (sysctl-name "hw.tbfrequency_compat") + (sysctl-name "hw.vectorunit") + (sysctl-name "kern.hostname") + (sysctl-name "kern.maxfilesperproc") + (sysctl-name "kern.osproductversion") + (sysctl-name "kern.osrelease") + (sysctl-name "kern.ostype") + (sysctl-name "kern.osvariant_status") + (sysctl-name "kern.osversion") + (sysctl-name "kern.secure_kernel") + (sysctl-name "kern.usrstack64") + (sysctl-name "kern.version") + (sysctl-name "sysctl.proc_cputype") + (sysctl-name-prefix "hw.perflevel") +) diff --git a/codex-rs/core/src/util.rs b/codex-rs/core/src/util.rs new file mode 100644 index 00000000..27241c77 --- /dev/null +++ b/codex-rs/core/src/util.rs @@ -0,0 +1,68 @@ +use std::sync::Arc; +use std::time::Duration; + +use rand::Rng; +use tokio::sync::Notify; +use tracing::debug; + +/// Make a CancellationToken that is fulfilled when SIGINT occurs. +pub fn notify_on_sigint() -> Arc { + let notify = Arc::new(Notify::new()); + + tokio::spawn({ + let notify = Arc::clone(¬ify); + async move { + loop { + tokio::signal::ctrl_c().await.ok(); + debug!("Keyboard interrupt"); + notify.notify_waiters(); + } + } + }); + + notify +} + +/// Default exponential back‑off schedule: 200ms → 400ms → 800ms → 1600ms. +pub(crate) fn backoff(attempt: u64) -> Duration { + let base_delay_ms = 200u64 * (1u64 << (attempt - 1)); + let jitter = rand::rng().random_range(0.8..1.2); + let delay_ms = (base_delay_ms as f64 * jitter) as u64; + Duration::from_millis(delay_ms) +} + +/// Return `true` if the current working directory is inside a Git repository. +/// +/// The check walks up the directory hierarchy looking for a `.git` folder. This +/// approach does **not** require the `git` binary or the `git2` crate and is +/// therefore fairly lightweight. It intentionally only looks for the +/// presence of a *directory* named `.git` – this is good enough for regular +/// work‑trees and bare repos that live inside a work‑tree (common for +/// developers running Codex locally). +/// +/// Note that this does **not** detect *work‑trees* created with +/// `git worktree add` where the checkout lives outside the main repository +/// directory. If you need Codex to work from such a checkout simply pass the +/// `--allow-no-git-exec` CLI flag that disables the repo requirement. 
+pub fn is_inside_git_repo() -> bool { + // Best‑effort: any IO error is treated as "not a repo" – the caller can + // decide what to do with the result. + let mut dir = match std::env::current_dir() { + Ok(d) => d, + Err(_) => return false, + }; + + loop { + if dir.join(".git").exists() { + return true; + } + + // Pop one component (go up one directory). `pop` returns false when + // we have reached the filesystem root. + if !dir.pop() { + break; + } + } + + false +} diff --git a/codex-rs/core/tests/live_agent.rs b/codex-rs/core/tests/live_agent.rs new file mode 100644 index 00000000..6562654c --- /dev/null +++ b/codex-rs/core/tests/live_agent.rs @@ -0,0 +1,219 @@ +//! Live integration tests that exercise the full [`Agent`] stack **against the real +//! OpenAI `/v1/responses` API**. These tests complement the lightweight mock‑based +//! unit tests by verifying that the agent can drive an end‑to‑end conversation, +//! stream incremental events, execute function‑call tool invocations and safely +//! chain multiple turns inside a single session – the exact scenarios that have +//! historically been brittle. +//! +//! The live tests are **ignored by default** so CI remains deterministic and free +//! of external dependencies. Developers can opt‑in locally with e.g. +//! +//! ```bash +//! OPENAI_API_KEY=sk‑... cargo test --test live_agent -- --ignored --nocapture +//! ``` +//! +//! Make sure your key has access to the experimental *Responses* API and that +//! any billable usage is acceptable. + +use std::time::Duration; + +use codex_core::protocol::AskForApproval; +use codex_core::protocol::EventMsg; +use codex_core::protocol::InputItem; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use codex_core::protocol::Submission; +use codex_core::Codex; +use tokio::sync::Notify; +use tokio::time::timeout; + +fn api_key_available() -> bool { + std::env::var("OPENAI_API_KEY").is_ok() +} + +/// Helper that spawns a fresh Agent and sends the mandatory *ConfigureSession* +/// submission. The caller receives the constructed [`Agent`] plus the unique +/// submission id used for the initialization message. +async fn spawn_codex() -> Codex { + assert!( + api_key_available(), + "OPENAI_API_KEY must be set for live tests" + ); + + // Environment tweaks to keep the tests snappy and inexpensive while still + // exercising retry/robustness logic. + std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "2"); + std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "2"); + + let agent = Codex::spawn(std::sync::Arc::new(Notify::new())).unwrap(); + + agent + .submit(Submission { + id: "init".into(), + op: Op::ConfigureSession { + model: None, + instructions: None, + approval_policy: AskForApproval::OnFailure, + sandbox_policy: SandboxPolicy::NetworkAndFileWriteRestricted, + }, + }) + .await + .expect("failed to submit init"); + + // Drain the SessionInitialized event so subsequent helper loops don't have + // to special‑case it. + loop { + let ev = timeout(Duration::from_secs(30), agent.next_event()) + .await + .expect("timeout waiting for init event") + .expect("agent channel closed"); + if matches!(ev.msg, EventMsg::SessionConfigured { .. }) { + break; + } + } + + agent +} + +/// Verifies that the agent streams incremental *AgentMessage* events **before** +/// emitting `TaskComplete` and that a second task inside the same session does +/// not get tripped up by a stale `previous_response_id`. 
+#[ignore] +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn live_streaming_and_prev_id_reset() { + if !api_key_available() { + eprintln!("skipping live_streaming_and_prev_id_reset – OPENAI_API_KEY not set"); + return; + } + + let codex = spawn_codex().await; + + // ---------- Task 1 ---------- + codex + .submit(Submission { + id: "task1".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: "Say the words 'stream test'".into(), + }], + }, + }) + .await + .unwrap(); + + let mut saw_message_before_complete = false; + loop { + let ev = timeout(Duration::from_secs(60), codex.next_event()) + .await + .expect("timeout waiting for task1 events") + .expect("agent closed"); + + match ev.msg { + EventMsg::AgentMessage { .. } => saw_message_before_complete = true, + EventMsg::TaskComplete => break, + EventMsg::Error { message } => panic!("agent reported error in task1: {message}"), + _ => (), + } + } + + assert!( + saw_message_before_complete, + "Agent did not stream any AgentMessage before TaskComplete" + ); + + // ---------- Task 2 (same session) ---------- + codex + .submit(Submission { + id: "task2".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: "Respond with exactly: second turn succeeded".into(), + }], + }, + }) + .await + .unwrap(); + + let mut got_expected = false; + loop { + let ev = timeout(Duration::from_secs(60), codex.next_event()) + .await + .expect("timeout waiting for task2 events") + .expect("agent closed"); + + match &ev.msg { + EventMsg::AgentMessage { message } if message.contains("second turn succeeded") => { + got_expected = true; + } + EventMsg::TaskComplete => break, + EventMsg::Error { message } => panic!("agent reported error in task2: {message}"), + _ => (), + } + } + + assert!(got_expected, "second task did not receive expected answer"); +} + +/// Exercises a *function‑call → shell execution* round‑trip by instructing the +/// model to run a harmless `echo` command. The test asserts that: +/// 1. the function call is executed (we see `ExecCommandBegin`/`End` events) +/// 2. the captured stdout reaches the client unchanged. +#[ignore] +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn live_shell_function_call() { + if !api_key_available() { + eprintln!("skipping live_shell_function_call – OPENAI_API_KEY not set"); + return; + } + + let codex = spawn_codex().await; + + const MARKER: &str = "codex_live_echo_ok"; + + codex + .submit(Submission { + id: "task_fn".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: format!( + "Use the shell function to run the command `echo {MARKER}` and no other commands." + ), + }], + }, + }) + .await + .unwrap(); + + let mut saw_begin = false; + let mut saw_end_with_output = false; + + loop { + let ev = timeout(Duration::from_secs(60), codex.next_event()) + .await + .expect("timeout waiting for function‑call events") + .expect("agent closed"); + + match ev.msg { + EventMsg::ExecCommandBegin { command, .. } => { + assert_eq!(command, vec!["echo", MARKER]); + saw_begin = true; + } + EventMsg::ExecCommandEnd { + stdout, exit_code, .. 
+ } => { + assert_eq!(exit_code, 0, "echo returned non‑zero exit code"); + assert!(stdout.contains(MARKER)); + saw_end_with_output = true; + } + EventMsg::TaskComplete => break, + EventMsg::Error { message } => panic!("agent error during shell test: {message}"), + _ => (), + } + } + + assert!(saw_begin, "ExecCommandBegin event missing"); + assert!( + saw_end_with_output, + "ExecCommandEnd with expected output missing" + ); +} diff --git a/codex-rs/core/tests/live_cli.rs b/codex-rs/core/tests/live_cli.rs new file mode 100644 index 00000000..bfae984d --- /dev/null +++ b/codex-rs/core/tests/live_cli.rs @@ -0,0 +1,143 @@ +//! Optional smoke tests that hit the real OpenAI /v1/responses endpoint. They are `#[ignore]` by +//! default so CI stays deterministic and free. Developers can run them locally with +//! `cargo test --test live_cli -- --ignored` provided they set a valid `OPENAI_API_KEY`. + +use assert_cmd::prelude::*; +use predicates::prelude::*; +use std::process::Command; +use std::process::Stdio; +use tempfile::TempDir; + +fn require_api_key() -> String { + std::env::var("OPENAI_API_KEY") + .expect("OPENAI_API_KEY env var not set — skip running live tests") +} + +/// Helper that spawns the binary inside a TempDir with minimal flags. Returns (Assert, TempDir). +fn run_live(prompt: &str) -> (assert_cmd::assert::Assert, TempDir) { + use std::io::Read; + use std::io::Write; + use std::thread; + + let dir = TempDir::new().unwrap(); + + // Build a plain `std::process::Command` so we have full control over the underlying stdio + // handles. `assert_cmd`’s own `Command` wrapper always forces stdout/stderr to be piped + // internally which prevents us from streaming them live to the terminal (see its `spawn` + // implementation). Instead we configure the std `Command` ourselves, then later hand the + // resulting `Output` to `assert_cmd` for the familiar assertions. + + let mut cmd = Command::cargo_bin("codex-rs").unwrap(); + cmd.current_dir(dir.path()); + cmd.env("OPENAI_API_KEY", require_api_key()); + + // We want three things at once: + // 1. live streaming of the child’s stdout/stderr while the test is running + // 2. captured output so we can keep using assert_cmd’s `Assert` helpers + // 3. cross‑platform behavior (best effort) + // + // To get that we: + // • set both stdout and stderr to `piped()` so we can read them programmatically + // • spawn a thread for each stream that copies bytes into two sinks: + // – the parent process’ stdout/stderr for live visibility + // – an in‑memory buffer so we can pass it to `assert_cmd` later + + // Pass the prompt through the `--` separator so the CLI knows when user input ends. + cmd.arg("--allow-no-git-exec") + .arg("-v") + .arg("--") + .arg(prompt); + + cmd.stdin(Stdio::piped()); + cmd.stdout(Stdio::piped()); + cmd.stderr(Stdio::piped()); + + let mut child = cmd.spawn().expect("failed to spawn codex-rs"); + + // Send the terminating newline so Session::run exits after the first turn. + child + .stdin + .as_mut() + .expect("child stdin unavailable") + .write_all(b"\n") + .expect("failed to write to child stdin"); + + // Helper that tees a ChildStdout/ChildStderr into both the parent’s stdio and a Vec. 
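+ // It drains the reader in 4 KiB chunks until EOF (or a read error), forwarding each chunk to the live writer and appending it to an in-memory buffer that the spawned thread returns when joined.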
+ fn tee( + mut reader: R, + mut writer: impl Write + Send + 'static, + ) -> thread::JoinHandle> { + thread::spawn(move || { + let mut buf = Vec::new(); + let mut chunk = [0u8; 4096]; + loop { + match reader.read(&mut chunk) { + Ok(0) => break, + Ok(n) => { + writer.write_all(&chunk[..n]).ok(); + writer.flush().ok(); + buf.extend_from_slice(&chunk[..n]); + } + Err(_) => break, + } + } + buf + }) + } + + let stdout_handle = tee( + child.stdout.take().expect("child stdout"), + std::io::stdout(), + ); + let stderr_handle = tee( + child.stderr.take().expect("child stderr"), + std::io::stderr(), + ); + + let status = child.wait().expect("failed to wait on child"); + let stdout = stdout_handle.join().expect("stdout thread panicked"); + let stderr = stderr_handle.join().expect("stderr thread panicked"); + + let output = std::process::Output { + status, + stdout, + stderr, + }; + + (output.assert(), dir) +} + +#[ignore] +#[test] +fn live_create_file_hello_txt() { + if std::env::var("OPENAI_API_KEY").is_err() { + eprintln!("skipping live_create_file_hello_txt – OPENAI_API_KEY not set"); + return; + } + + let (assert, dir) = run_live("Use the shell tool with the apply_patch command to create a file named hello.txt containing the text 'hello'."); + + assert.success(); + + let path = dir.path().join("hello.txt"); + assert!(path.exists(), "hello.txt was not created by the model"); + + let contents = std::fs::read_to_string(path).unwrap(); + + assert_eq!(contents.trim(), "hello"); +} + +#[ignore] +#[test] +fn live_print_working_directory() { + if std::env::var("OPENAI_API_KEY").is_err() { + eprintln!("skipping live_print_working_directory – OPENAI_API_KEY not set"); + return; + } + + let (assert, dir) = run_live("Print the current working directory using the shell function."); + + assert + .success() + .stdout(predicate::str::contains(dir.path().to_string_lossy())); +} diff --git a/codex-rs/core/tests/previous_response_id.rs b/codex-rs/core/tests/previous_response_id.rs new file mode 100644 index 00000000..56fa9a6c --- /dev/null +++ b/codex-rs/core/tests/previous_response_id.rs @@ -0,0 +1,156 @@ +use std::time::Duration; + +use codex_core::protocol::AskForApproval; +use codex_core::protocol::InputItem; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use codex_core::protocol::Submission; +use codex_core::Codex; +use serde_json::Value; +use tokio::time::timeout; +use wiremock::matchers::method; +use wiremock::matchers::path; +use wiremock::Match; +use wiremock::Mock; +use wiremock::MockServer; +use wiremock::Request; +use wiremock::ResponseTemplate; + +/// Matcher asserting that JSON body has NO `previous_response_id` field. +struct NoPrevId; + +impl Match for NoPrevId { + fn matches(&self, req: &Request) -> bool { + serde_json::from_slice::(&req.body) + .map(|v| v.get("previous_response_id").is_none()) + .unwrap_or(false) + } +} + +/// Matcher asserting that JSON body HAS a `previous_response_id` field. +struct HasPrevId; + +impl Match for HasPrevId { + fn matches(&self, req: &Request) -> bool { + serde_json::from_slice::(&req.body) + .map(|v| v.get("previous_response_id").is_some()) + .unwrap_or(false) + } +} + +/// Build minimal SSE stream with completed marker. 
+fn sse_completed(id: &str) -> String { + format!( + "event: response.completed\n\ +data: {{\"type\":\"response.completed\",\"response\":{{\"id\":\"{}\",\"output\":[]}}}}\n\n\n", + id + ) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn keeps_previous_response_id_between_tasks() { + // Mock server + let server = MockServer::start().await; + + // First request – must NOT include `previous_response_id`. + let first = ResponseTemplate::new(200) + .insert_header("content-type", "text/event-stream") + .set_body_raw(sse_completed("resp1"), "text/event-stream"); + + Mock::given(method("POST")) + .and(path("/v1/responses")) + .and(NoPrevId) + .respond_with(first) + .expect(1) + .mount(&server) + .await; + + // Second request – MUST include `previous_response_id`. + let second = ResponseTemplate::new(200) + .insert_header("content-type", "text/event-stream") + .set_body_raw(sse_completed("resp2"), "text/event-stream"); + + Mock::given(method("POST")) + .and(path("/v1/responses")) + .and(HasPrevId) + .respond_with(second) + .expect(1) + .mount(&server) + .await; + + // Environment + std::env::set_var("OPENAI_API_KEY", "test-key"); + std::env::set_var("OPENAI_API_BASE", server.uri()); + std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0"); + std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "0"); + + let codex = Codex::spawn(std::sync::Arc::new(tokio::sync::Notify::new())).unwrap(); + + // Init session + codex + .submit(Submission { + id: "init".into(), + op: Op::ConfigureSession { + model: None, + instructions: None, + approval_policy: AskForApproval::OnFailure, + sandbox_policy: SandboxPolicy::NetworkAndFileWriteRestricted, + }, + }) + .await + .unwrap(); + // drain init event + let _ = codex.next_event().await.unwrap(); + + // Task 1 – triggers first request (no previous_response_id) + codex + .submit(Submission { + id: "task1".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: "hello".into(), + }], + }, + }) + .await + .unwrap(); + + // Wait for TaskComplete + loop { + let ev = timeout(Duration::from_secs(1), codex.next_event()) + .await + .unwrap() + .unwrap(); + if matches!(ev.msg, codex_core::protocol::EventMsg::TaskComplete) { + break; + } + } + + // Task 2 – should include `previous_response_id` (triggers second request) + codex + .submit(Submission { + id: "task2".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: "again".into(), + }], + }, + }) + .await + .unwrap(); + + // Wait for TaskComplete or error + loop { + let ev = timeout(Duration::from_secs(1), codex.next_event()) + .await + .unwrap() + .unwrap(); + match ev.msg { + codex_core::protocol::EventMsg::TaskComplete => break, + codex_core::protocol::EventMsg::Error { message } => { + panic!("unexpected error: {message}") + } + _ => (), + } + } +} diff --git a/codex-rs/core/tests/stream_no_completed.rs b/codex-rs/core/tests/stream_no_completed.rs new file mode 100644 index 00000000..da0cfb27 --- /dev/null +++ b/codex-rs/core/tests/stream_no_completed.rs @@ -0,0 +1,109 @@ +//! Verifies that the agent retries when the SSE stream terminates before +//! delivering a `response.completed` event. 
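+//! The mock server below serves a truncated SSE stream on the first request and a well-formed one on the second, so the test only passes if the agent retries the request and eventually observes `TaskComplete`.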
+ +use std::time::Duration; + +use codex_core::protocol::AskForApproval; +use codex_core::protocol::InputItem; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use codex_core::protocol::Submission; +use codex_core::Codex; +use tokio::time::timeout; +use wiremock::matchers::method; +use wiremock::matchers::path; +use wiremock::Mock; +use wiremock::MockServer; +use wiremock::Request; +use wiremock::Respond; +use wiremock::ResponseTemplate; + +fn sse_incomplete() -> String { + // Only a single line; missing the completed event. + "event: response.output_item.done\n\n".to_string() +} + +fn sse_completed(id: &str) -> String { + format!( + "event: response.completed\n\ +data: {{\"type\":\"response.completed\",\"response\":{{\"id\":\"{}\",\"output\":[]}}}}\n\n\n", + id + ) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn retries_on_early_close() { + let server = MockServer::start().await; + + struct SeqResponder; + impl Respond for SeqResponder { + fn respond(&self, _: &Request) -> ResponseTemplate { + use std::sync::atomic::AtomicUsize; + use std::sync::atomic::Ordering; + static CALLS: AtomicUsize = AtomicUsize::new(0); + let n = CALLS.fetch_add(1, Ordering::SeqCst); + if n == 0 { + ResponseTemplate::new(200) + .insert_header("content-type", "text/event-stream") + .set_body_raw(sse_incomplete(), "text/event-stream") + } else { + ResponseTemplate::new(200) + .insert_header("content-type", "text/event-stream") + .set_body_raw(sse_completed("resp_ok"), "text/event-stream") + } + } + } + + Mock::given(method("POST")) + .and(path("/v1/responses")) + .respond_with(SeqResponder {}) + .expect(2) + .mount(&server) + .await; + + // Environment + std::env::set_var("OPENAI_API_KEY", "test-key"); + std::env::set_var("OPENAI_API_BASE", server.uri()); + std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0"); + std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "1"); + std::env::set_var("OPENAI_STREAM_IDLE_TIMEOUT_MS", "2000"); + + let codex = Codex::spawn(std::sync::Arc::new(tokio::sync::Notify::new())).unwrap(); + + codex + .submit(Submission { + id: "init".into(), + op: Op::ConfigureSession { + model: None, + instructions: None, + approval_policy: AskForApproval::OnFailure, + sandbox_policy: SandboxPolicy::NetworkAndFileWriteRestricted, + }, + }) + .await + .unwrap(); + let _ = codex.next_event().await.unwrap(); + + codex + .submit(Submission { + id: "task".into(), + op: Op::UserInput { + items: vec![InputItem::Text { + text: "hello".into(), + }], + }, + }) + .await + .unwrap(); + + // Wait until TaskComplete (should succeed after retry). + loop { + let ev = timeout(Duration::from_secs(10), codex.next_event()) + .await + .unwrap() + .unwrap(); + if matches!(ev.msg, codex_core::protocol::EventMsg::TaskComplete) { + break; + } + } +} diff --git a/codex-rs/docs/protocol_v1.md b/codex-rs/docs/protocol_v1.md new file mode 100644 index 00000000..4d0a6e25 --- /dev/null +++ b/codex-rs/docs/protocol_v1.md @@ -0,0 +1,172 @@ +Overview of Protocol Defined in [protocol.rs](../core/src/protocol.rs) and [agent.rs](../core/src/agent.rs). + +The goal of this document is to define terminology used in the system and explain the expected behavior of the system. + +NOTE: The code might not completely match this spec. There are a few minor changes that need to be made after this spec has been reviewed, which will not alter the existing TUI's functionality. + +## Entities + +These are entities exit on the codex backend. 
The intent of this section is to establish vocabulary and construct a shared mental model for the `Codex` core system. + +0. `Model` + - In our case, this is the Responses REST API +1. `Codex` + - The core engine of codex + - Runs locally, either in a background thread or separate process + - Communicated with via a queue pair – SQ (Submission Queue) / EQ (Event Queue) + - Takes user input, makes requests to the `Model`, executes commands and applies patches. +2. `Session` + - The `Codex`'s current configuration and state + - `Codex` starts with no `Session`, and it is initialized by `Op::ConfigureSession`, which should be the first message sent by the UI. + - The current `Session` can be reconfigured with additional `Op::ConfigureSession` calls. + - Any running execution is aborted when the session is reconfigured. +3. `Task` + - A `Task` is `Codex` executing work in response to user input. + - `Session` has at most one `Task` running at a time. + - Receiving `Op::UserInput` starts a `Task` + - Consists of a series of `Turn`s + - The `Task` executes until: + - The `Model` completes the task and there is no output to feed into an additional `Turn` + - Additional `Op::UserInput` aborts the current task and starts a new one + - UI interrupts with `Op::Interrupt` + - Fatal errors are encountered, e.g. `Model` connection exceeding retry limits + - Blocked by user approval (executing a command or patch) +4. `Turn` + - One cycle of iteration in a `Task`, consists of: + - A request to the `Model` - (initially) prompt + (optional) `last_response_id`, or (in loop) previous turn output + - The `Model` streams responses back in an SSE, which are collected until a "completed" message is received and the SSE terminates + - `Codex` then executes command(s), applies patch(es), and outputs message(s) returned by the `Model` + - Pauses to request approval when necessary + - The output of one `Turn` is the input to the next `Turn` + - A `Turn` yielding no output terminates the `Task` + +The term "UI" is used to refer to the application driving `Codex`. This may be the CLI / TUI chat-like interface that users operate, or it may be a GUI interface like a VSCode extension. The UI is external to `Codex`, as `Codex` is intended to be operated by arbitrary UI implementations. + +When a `Turn` completes, the `response_id` from the `Model`'s final `response.completed` message is stored in the `Session` state to resume the thread given the next `Op::UserInput`. The `response_id` is also returned in the `EventMsg::TurnComplete` to the UI, which can be used to fork the thread from an earlier point by providing it in the `Op::UserInput`. + +Since only 1 `Task` can be run at a time, for parallel tasks it is recommended that a single `Codex` be run for each thread of work. + +## Interface + +- `Codex` + - Communicates with UI via a `SQ` (Submission Queue) and `EQ` (Event Queue). +- `Submission` + - These are messages sent on the `SQ` (UI -> `Codex`) + - Has a string ID provided by the UI, referred to as `sub_id` + - `Op` refers to the enum of all possible `Submission` payloads + - This enum is `non_exhaustive`; variants can be added at future dates +- `Event` + - These are messages sent on the `EQ` (`Codex` -> UI) + - Each `Event` has a non-unique ID, matching the `sub_id` from the `Op::UserInput` that started the current task.
+ - `EventMsg` refers to the enum of all possible `Event` payloads + - This enum is `non_exhaustive`; variants can be added at future dates + - It should be expected that new `EventMsg` variants will be added over time to expose more detailed information about the model's actions. + +For complete documentation of the `Op` and `EventMsg` variants, refer to [protocol.rs](../core/src/protocol.rs). Some example payload types: + +- `Op` + - `Op::UserInput` – Any input from the user to kick off a `Task` + - `Op::Interrupt` – Interrupts a running task + - `Op::ExecApproval` – Approve or deny code execution +- `EventMsg` + - `EventMsg::AgentMessage` – Messages from the `Model` + - `EventMsg::ExecApprovalRequest` – Request approval from user to execute a command + - `EventMsg::TaskComplete` – A task completed successfully + - `EventMsg::Error` – A task stopped with an error + - `EventMsg::TurnComplete` – Contains a `response_id` bookmark for last `response_id` executed by the task. This can be used to continue the task at a later point in time, perhaps with additional user input. + +The `response_id` returned from each task matches the OpenAI `response_id` stored in the API's `/responses` endpoint. It can be stored and used in future `Sessions` to resume threads of work. + +## Transport + +Can operate over any transport that supports bi-directional streaming. - cross-thread channels - IPC channels - stdin/stdout - TCP - HTTP2 - gRPC + +Non-framed transports, such as stdin/stdout and TCP, should use newline-delimited JSON in sending messages. + +## Example Flows + +Sequence diagram examples of common interactions. In each diagram, some unimportant events may be eliminated for simplicity. + +### Basic UI Flow + +A single user input, followed by a 2-turn task + +```mermaid +sequenceDiagram + box UI + participant user as User + end + box Daemon + participant codex as Codex + participant session as Session + participant task as Task + end + box Rest API + participant agent as Model + end + user->>codex: Op::ConfigureSession + codex-->>session: create session + codex->>user: Event::SessionConfigured + user->>session: Op::UserInput + session-->>+task: start task + task->>user: Event::TaskStarted + task->>agent: prompt + agent->>task: response (exec) + task->>-user: Event::ExecApprovalRequest + user->>+task: Op::ExecApproval::Allow + task->>user: Event::ExecStart + task->>task: exec + task->>user: Event::ExecStop + task->>user: Event::TurnComplete + task->>agent: stdout + agent->>task: response (patch) + task->>task: apply patch (auto-approved) + task->>agent: success + agent->>task: response
(msg + completed) + task->>user: Event::AgentMessage + task->>user: Event::TurnComplete + task->>-user: Event::TaskComplete +``` + +### Task Interrupt + +Interrupting a task and continuing with additional user input. + +```mermaid +sequenceDiagram + box UI + participant user as User + end + box Daemon + participant session as Session + participant task1 as Task1 + participant task2 as Task2 + end + box Rest API + participant agent as Model + end + user->>session: Op::UserInput + session-->>+task1: start task + task1->>user: Event::TaskStarted + task1->>agent: prompt + agent->>task1: response (exec) + task1->>task1: exec (auto-approved) + task1->>user: Event::TurnComplete + task1->>agent: stdout + task1->>agent: response (exec) + task1->>task1: exec (auto-approved) + user->>task1: Op::Interrupt + task1->>-user: Event::Error("interrupted") + user->>session: Op::UserInput w/ last_response_id + session-->>+task2: start task + task2->>user: Event::TaskStarted + task2->>agent: prompt + Task1 last_response_id + agent->>task2: response (exec) + task2->>task2: exec (auto-approve) + task2->>user: Event::TurnCompleted + task2->>agent: stdout + agent->>task2: msg + completed + task2->>user: Event::AgentMessage + task2->>user: Event::TurnCompleted + task2->>-user: Event::TaskCompleted +``` diff --git a/codex-rs/exec/Cargo.toml b/codex-rs/exec/Cargo.toml new file mode 100644 index 00000000..f214f900 --- /dev/null +++ b/codex-rs/exec/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codex-exec" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "codex-exec" +path = "src/main.rs" + +[lib] +name = "codex_exec" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-core = { path = "../core" } +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tracing = { version = "0.1.41", features = ["log"] } +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } diff --git a/codex-rs/exec/src/cli.rs b/codex-rs/exec/src/cli.rs new file mode 100644 index 00000000..a934aba0 --- /dev/null +++ b/codex-rs/exec/src/cli.rs @@ -0,0 +1,21 @@ +use clap::Parser; +use std::path::PathBuf; + +#[derive(Parser, Debug)] +#[command(version)] +pub struct Cli { + /// Optional image(s) to attach to the initial prompt. + #[arg(long = "image", short = 'i', value_name = "FILE", value_delimiter = ',', num_args = 1..)] + pub images: Vec, + + /// Model the agent should use. + #[arg(long, short = 'm')] + pub model: Option, + + /// Allow running Codex outside a Git repository. + #[arg(long = "skip-git-repo-check", default_value_t = false)] + pub skip_git_repo_check: bool, + + /// Initial instructions for the agent. + pub prompt: Option, +} diff --git a/codex-rs/exec/src/lib.rs b/codex-rs/exec/src/lib.rs new file mode 100644 index 00000000..c22b6bd6 --- /dev/null +++ b/codex-rs/exec/src/lib.rs @@ -0,0 +1,208 @@ +mod cli; +use std::sync::Arc; + +pub use cli::Cli; +use codex_core::codex_wrapper; +use codex_core::protocol::AskForApproval; +use codex_core::protocol::Event; +use codex_core::protocol::EventMsg; +use codex_core::protocol::FileChange; +use codex_core::protocol::InputItem; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use codex_core::util::is_inside_git_repo; +use tracing::debug; +use tracing::error; +use tracing::info; +use tracing_subscriber::EnvFilter; + +pub async fn run_main(cli: Cli) -> anyhow::Result<()> { + // TODO(mbolin): Take a more thoughtful approach to logging. 
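+    // The filter below honors `RUST_LOG` when it is set; otherwise it falls
+    // back to `default_level`.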
+ let default_level = "error"; + let allow_ansi = true; + let _ = tracing_subscriber::fmt() + .with_env_filter( + EnvFilter::try_from_default_env() + .or_else(|_| EnvFilter::try_new(default_level)) + .unwrap(), + ) + .with_ansi(allow_ansi) + .with_writer(std::io::stderr) + .try_init(); + + let Cli { + skip_git_repo_check, + model, + images, + prompt, + .. + } = cli; + + if !skip_git_repo_check && !is_inside_git_repo() { + eprintln!("Not inside a Git repo and --skip-git-repo-check was not specified."); + std::process::exit(1); + } else if images.is_empty() && prompt.is_none() { + eprintln!("No images or prompt specified."); + std::process::exit(1); + } + + // TODO(mbolin): We are reworking the CLI args right now, so this will + // likely come from a new --execution-policy arg. + let approval_policy = AskForApproval::Never; + let sandbox_policy = SandboxPolicy::NetworkAndFileWriteRestricted; + let (codex_wrapper, event, ctrl_c) = + codex_wrapper::init_codex(approval_policy, sandbox_policy, model).await?; + let codex = Arc::new(codex_wrapper); + info!("Codex initialized with event: {event:?}"); + + let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::(); + { + let codex = codex.clone(); + tokio::spawn(async move { + loop { + let interrupted = ctrl_c.notified(); + tokio::select! { + _ = interrupted => { + // Forward an interrupt to the codex so it can abort any in‑flight task. + let _ = codex + .submit( + Op::Interrupt, + ) + .await; + + // Exit the inner loop and return to the main input prompt. The codex + // will emit a `TurnInterrupted` (Error) event which is drained later. + break; + } + res = codex.next_event() => match res { + Ok(event) => { + debug!("Received event: {event:?}"); + process_event(&event); + if let Err(e) = tx.send(event) { + error!("Error sending event: {e:?}"); + break; + } + }, + Err(e) => { + error!("Error receiving event: {e:?}"); + break; + } + } + } + } + }); + } + + if !images.is_empty() { + // Send images first. + let items: Vec = images + .into_iter() + .map(|path| InputItem::LocalImage { path }) + .collect(); + let initial_images_event_id = codex.submit(Op::UserInput { items }).await?; + info!("Sent images with event ID: {initial_images_event_id}"); + while let Ok(event) = codex.next_event().await { + if event.id == initial_images_event_id && matches!(event.msg, EventMsg::TaskComplete) { + break; + } + } + } + + if let Some(prompt) = prompt { + // Send the prompt. + let items: Vec = vec![InputItem::Text { text: prompt }]; + let initial_prompt_task_id = codex.submit(Op::UserInput { items }).await?; + info!("Sent prompt with event ID: {initial_prompt_task_id}"); + while let Some(event) = rx.recv().await { + if event.id == initial_prompt_task_id && matches!(event.msg, EventMsg::TaskComplete) { + break; + } + } + } + + Ok(()) +} + +fn process_event(event: &Event) { + let Event { id, msg } = event; + match msg { + EventMsg::Error { message } => { + println!("Error: {message}"); + } + EventMsg::BackgroundEvent { .. } => { + // Ignore these for now. 
+ } + EventMsg::TaskStarted => { + println!("Task started: {id}"); + } + EventMsg::TaskComplete => { + println!("Task complete: {id}"); + } + EventMsg::AgentMessage { message } => { + println!("Agent message: {message}"); + } + EventMsg::ExecCommandBegin { + call_id, + command, + cwd, + } => { + println!("exec('{call_id}'): {:?} in {cwd}", command); + } + EventMsg::ExecCommandEnd { + call_id, + stdout, + stderr, + exit_code, + } => { + let output = if *exit_code == 0 { stdout } else { stderr }; + let truncated_output = output.lines().take(5).collect::>().join("\n"); + println!("exec('{call_id}') exited {exit_code}:\n{truncated_output}"); + } + EventMsg::PatchApplyBegin { + call_id, + auto_approved, + changes, + } => { + let changes = changes + .iter() + .map(|(path, change)| { + format!("{} {}", format_file_change(change), path.to_string_lossy()) + }) + .collect::>() + .join("\n"); + println!("apply_patch('{call_id}') auto_approved={auto_approved}:\n{changes}"); + } + EventMsg::PatchApplyEnd { + call_id, + stdout, + stderr, + success, + } => { + let (exit_code, output) = if *success { (0, stdout) } else { (1, stderr) }; + let truncated_output = output.lines().take(5).collect::>().join("\n"); + println!("apply_patch('{call_id}') exited {exit_code}:\n{truncated_output}"); + } + EventMsg::ExecApprovalRequest { .. } => { + // Should we exit? + } + EventMsg::ApplyPatchApprovalRequest { .. } => { + // Should we exit? + } + _ => { + // Ignore event. + } + } +} + +fn format_file_change(change: &FileChange) -> &'static str { + match change { + FileChange::Add { .. } => "A", + FileChange::Delete => "D", + FileChange::Update { + move_path: Some(_), .. + } => "R", + FileChange::Update { + move_path: None, .. + } => "M", + } +} diff --git a/codex-rs/exec/src/main.rs b/codex-rs/exec/src/main.rs new file mode 100644 index 00000000..94a02810 --- /dev/null +++ b/codex-rs/exec/src/main.rs @@ -0,0 +1,11 @@ +use clap::Parser; +use codex_exec::run_main; +use codex_exec::Cli; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = Cli::parse(); + run_main(cli).await?; + + Ok(()) +} diff --git a/codex-rs/interactive/Cargo.toml b/codex-rs/interactive/Cargo.toml new file mode 100644 index 00000000..b2a7234e --- /dev/null +++ b/codex-rs/interactive/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "codex-interactive" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "codex-interactive" +path = "src/main.rs" + +[lib] +name = "codex_interactive" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-core = { path = "../core", features = ["cli"] } +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } diff --git a/codex-rs/interactive/src/cli.rs b/codex-rs/interactive/src/cli.rs new file mode 100644 index 00000000..ffb61dfc --- /dev/null +++ b/codex-rs/interactive/src/cli.rs @@ -0,0 +1,33 @@ +use clap::Parser; +use codex_core::ApprovalModeCliArg; +use codex_core::SandboxModeCliArg; +use std::path::PathBuf; + +#[derive(Parser, Debug)] +#[command(version)] +pub struct Cli { + /// Optional image(s) to attach to the initial prompt. + #[arg(long = "image", short = 'i', value_name = "FILE", value_delimiter = ',', num_args = 1..)] + pub images: Vec, + + /// Model the agent should use. + #[arg(long, short = 'm')] + pub model: Option, + + /// Configure when the model requires human approval before executing a command. 
+ #[arg(long = "ask-for-approval", short = 'a', value_enum, default_value_t = ApprovalModeCliArg::OnFailure)] + pub approval_policy: ApprovalModeCliArg, + + /// Configure the process restrictions when a command is executed. + /// + /// Uses OS-specific sandboxing tools; Seatbelt on OSX, landlock+seccomp on Linux. + #[arg(long = "sandbox", short = 's', value_enum, default_value_t = SandboxModeCliArg::NetworkAndFileWriteRestricted)] + pub sandbox_policy: SandboxModeCliArg, + + /// Allow running Codex outside a Git repository. + #[arg(long = "skip-git-repo-check", default_value_t = false)] + pub skip_git_repo_check: bool, + + /// Initial instructions for the agent. + pub prompt: Option, +} diff --git a/codex-rs/interactive/src/lib.rs b/codex-rs/interactive/src/lib.rs new file mode 100644 index 00000000..a36a0ee2 --- /dev/null +++ b/codex-rs/interactive/src/lib.rs @@ -0,0 +1,7 @@ +mod cli; +pub use cli::Cli; + +pub async fn run_main(_cli: Cli) -> anyhow::Result<()> { + eprintln!("Interactive mode is not implemented yet."); + std::process::exit(1); +} diff --git a/codex-rs/interactive/src/main.rs b/codex-rs/interactive/src/main.rs new file mode 100644 index 00000000..20f3fb1d --- /dev/null +++ b/codex-rs/interactive/src/main.rs @@ -0,0 +1,11 @@ +use clap::Parser; +use codex_interactive::run_main; +use codex_interactive::Cli; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = Cli::parse(); + run_main(cli).await?; + + Ok(()) +} diff --git a/codex-rs/justfile b/codex-rs/justfile new file mode 100644 index 00000000..f2ef5029 --- /dev/null +++ b/codex-rs/justfile @@ -0,0 +1,19 @@ +# Display help +help: + just -l + +# Install the `codex-tui` binary +install: + cargo install --path tui + +# Run the TUI app +tui *args: + cargo run --bin codex -- tui {{args}} + +# Run the REPL app +repl *args: + cargo run --bin codex -- repl {{args}} + +# Run the Proto app +proto *args: + cargo run --bin codex -- proto {{args}} diff --git a/codex-rs/repl/Cargo.toml b/codex-rs/repl/Cargo.toml new file mode 100644 index 00000000..24494ea0 --- /dev/null +++ b/codex-rs/repl/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "codex-repl" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "codex-repl" +path = "src/main.rs" + +[lib] +name = "codex_repl" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-core = { path = "../core", features = ["cli"] } +owo-colors = "4.2.0" +rand = "0.9" +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tracing = { version = "0.1.41", features = ["log"] } +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } diff --git a/codex-rs/repl/src/cli.rs b/codex-rs/repl/src/cli.rs new file mode 100644 index 00000000..bb83046d --- /dev/null +++ b/codex-rs/repl/src/cli.rs @@ -0,0 +1,60 @@ +use clap::ArgAction; +use clap::Parser; +use codex_core::ApprovalModeCliArg; +use codex_core::SandboxModeCliArg; +use std::path::PathBuf; + +/// Command‑line arguments. +#[derive(Debug, Parser)] +#[command( + author, + version, + about = "Interactive Codex CLI that streams all agent actions." +)] +pub struct Cli { + /// User prompt to start the session. + pub prompt: Option, + + /// Override the default model from ~/.codex/config.toml. + #[arg(short, long)] + pub model: Option, + + /// Optional images to attach to the prompt. + #[arg(long, value_name = "FILE")] + pub images: Vec, + + /// Increase verbosity (-v info, -vv debug, -vvv trace). 
+ /// + /// The flag may be passed up to three times. Without any -v the CLI only prints warnings and errors. + #[arg(short, long, action = ArgAction::Count)] + pub verbose: u8, + + /// Don't use colored ansi output for verbose logging + #[arg(long)] + pub no_ansi: bool, + + /// Configure when the model requires human approval before executing a command. + #[arg(long = "ask-for-approval", short = 'a', value_enum, default_value_t = ApprovalModeCliArg::OnFailure)] + pub approval_policy: ApprovalModeCliArg, + + /// Configure the process restrictions when a command is executed. + /// + /// Uses OS-specific sandboxing tools; Seatbelt on OSX, landlock+seccomp on Linux. + #[arg(long = "sandbox", short = 's', value_enum, default_value_t = SandboxModeCliArg::NetworkAndFileWriteRestricted)] + pub sandbox_policy: SandboxModeCliArg, + + /// Allow running Codex outside a Git repository. By default the CLI + /// aborts early when the current working directory is **not** inside a + /// Git repo because most agents rely on `git` for interacting with the + /// code‑base. Pass this flag if you really know what you are doing. + #[arg(long, action = ArgAction::SetTrue, default_value_t = false)] + pub allow_no_git_exec: bool, + + /// Record submissions into file as JSON + #[arg(short = 'S', long)] + pub record_submissions: Option, + + /// Record events into file as JSON + #[arg(short = 'E', long)] + pub record_events: Option, +} diff --git a/codex-rs/repl/src/lib.rs b/codex-rs/repl/src/lib.rs new file mode 100644 index 00000000..2266718e --- /dev/null +++ b/codex-rs/repl/src/lib.rs @@ -0,0 +1,423 @@ +use std::io::stdin; +use std::io::stdout; +use std::io::Write; +use std::sync::Arc; + +use codex_core::config::Config; +use codex_core::protocol; +use codex_core::protocol::FileChange; +use codex_core::util::is_inside_git_repo; +use codex_core::util::notify_on_sigint; +use codex_core::Codex; +use owo_colors::OwoColorize; +use owo_colors::Style; +use tokio::io::AsyncBufReadExt; +use tokio::io::BufReader; +use tokio::io::Lines; +use tokio::io::Stdin; +use tokio::sync::Notify; +use tracing::debug; +use tracing_subscriber::EnvFilter; + +mod cli; +pub use cli::Cli; + +/// Initialize the global logger once at startup based on the `--verbose` flag. +fn init_logger(verbose: u8, allow_ansi: bool) { + // Map -v occurrences to explicit log levels: + // 0 → warn (default) + // 1 → info + // 2 → debug + // ≥3 → trace + + let default_level = match verbose { + 0 => "warn", + 1 => "info", + 2 => "codex=debug", + _ => "codex=trace", + }; + + // Only initialize the logger once – repeated calls are ignored. `try_init` will return an + // error if another crate (like tests) initialized it first, which we can safely ignore. + // By default `tracing_subscriber::fmt()` writes formatted logs to stderr. That is fine when + // running the CLI manually but in our smoke tests we capture **stdout** (via `assert_cmd`) and + // ignore stderr. As a result none of the `tracing::info!` banners or warnings show up in the + // recorded output making it much harder to debug live runs. + + // Switch the logger's writer to stdout so both human runs and the integration tests see the + // same stream. Disable ANSI colors because the binary already prints plain text and color + // escape codes make predicate matching brittle. 
+ let _ = tracing_subscriber::fmt() + .with_env_filter( + EnvFilter::try_from_default_env() + .or_else(|_| EnvFilter::try_new(default_level)) + .unwrap(), + ) + .with_ansi(allow_ansi) + .with_writer(std::io::stdout) + .try_init(); +} + +pub async fn run_main(cli: Cli) -> anyhow::Result<()> { + let ctrl_c = notify_on_sigint(); + + // Abort early when the user runs Codex outside a Git repository unless + // they explicitly acknowledged the risks with `--allow-no-git-exec`. + if !cli.allow_no_git_exec && !is_inside_git_repo() { + eprintln!( + "We recommend running codex inside a git repository. \ + If you understand the risks, you can proceed with \ + `--allow-no-git-exec`." + ); + std::process::exit(1); + } + + // Initialize logging before any other work so early errors are captured. + init_logger(cli.verbose, !cli.no_ansi); + + let config = Config::load().unwrap_or_default(); + + codex_main(cli, config, ctrl_c).await +} + +async fn codex_main(mut cli: Cli, cfg: Config, ctrl_c: Arc) -> anyhow::Result<()> { + let mut builder = Codex::builder(); + if let Some(path) = cli.record_submissions { + builder = builder.record_submissions(path); + } + if let Some(path) = cli.record_events { + builder = builder.record_events(path); + } + let codex = builder.spawn(Arc::clone(&ctrl_c))?; + let init_id = random_id(); + let init = protocol::Submission { + id: init_id.clone(), + op: protocol::Op::ConfigureSession { + model: cli.model.or(cfg.model), + instructions: cfg.instructions, + approval_policy: cli.approval_policy.into(), + sandbox_policy: cli.sandbox_policy.into(), + }, + }; + + out( + "initializing session", + MessagePriority::BackgroundEvent, + MessageActor::User, + ); + codex.submit(init).await?; + + // init + loop { + out( + "waiting for session initialization", + MessagePriority::BackgroundEvent, + MessageActor::User, + ); + let event = codex.next_event().await?; + if event.id == init_id { + if let protocol::EventMsg::Error { message } = event.msg { + anyhow::bail!("Error during initialization: {message}"); + } else { + out( + "session initialized", + MessagePriority::BackgroundEvent, + MessageActor::User, + ); + break; + } + } + } + + // run loop + let mut reader = InputReader::new(ctrl_c.clone()); + loop { + let text = match cli.prompt.take() { + Some(input) => input, + None => match reader.request_input().await? { + Some(input) => input, + None => { + // ctrl + d + println!(); + return Ok(()); + } + }, + }; + if text.trim().is_empty() { + continue; + } + // Interpret certain single‑word commands as immediate termination requests. + let trimmed = text.trim(); + if trimmed == "q" { + // Exit gracefully. + println!("Exiting…"); + return Ok(()); + } + + let sub = protocol::Submission { + id: random_id(), + op: protocol::Op::UserInput { + items: vec![protocol::InputItem::Text { text }], + }, + }; + + out( + "sending request to model", + MessagePriority::TaskProgress, + MessageActor::User, + ); + codex.submit(sub).await?; + + // Wait for agent events **or** user interrupts (Ctrl+C). + 'inner: loop { + // Listen for either the next agent event **or** a SIGINT notification. Using + // `tokio::select!` allows the user to cancel a long‑running request that would + // otherwise leave the CLI stuck waiting for a server response. + let event = { + let interrupted = ctrl_c.notified(); + tokio::select! { + _ = interrupted => { + // Forward an interrupt to the agent so it can abort any in‑flight task. 
+ let _ = codex + .submit(protocol::Submission { + id: random_id(), + op: protocol::Op::Interrupt, + }) + .await; + + // Exit the inner loop and return to the main input prompt. The agent + // will emit a `TurnInterrupted` (Error) event which is drained later. + break 'inner; + } + res = codex.next_event() => res? + } + }; + + debug!(?event, "Got event"); + let id = event.id; + match event.msg { + protocol::EventMsg::Error { message } => { + println!("Error: {message}"); + break 'inner; + } + protocol::EventMsg::TaskComplete => break 'inner, + protocol::EventMsg::AgentMessage { message } => { + out(&message, MessagePriority::UserMessage, MessageActor::Agent) + } + protocol::EventMsg::SessionConfigured { model } => { + debug!(model, "Session initialized"); + } + protocol::EventMsg::ExecApprovalRequest { + command, + cwd, + reason, + } => { + let reason_str = reason + .as_deref() + .map(|r| format!(" [{r}]")) + .unwrap_or_default(); + + let prompt = format!( + "approve command in {} {}{} (y/N): ", + cwd.display(), + command.join(" "), + reason_str + ); + let decision = request_user_approval2(prompt)?; + let sub = protocol::Submission { + id: random_id(), + op: protocol::Op::ExecApproval { id, decision }, + }; + out( + "submitting command approval", + MessagePriority::TaskProgress, + MessageActor::User, + ); + codex.submit(sub).await?; + } + protocol::EventMsg::ApplyPatchApprovalRequest { + changes, + reason: _, + grant_root: _, + } => { + let file_list = changes + .keys() + .map(|path| path.to_string_lossy().to_string()) + .collect::>() + .join(", "); + let request = + format!("approve apply_patch that will touch? {file_list} (y/N): "); + let decision = request_user_approval2(request)?; + let sub = protocol::Submission { + id: random_id(), + op: protocol::Op::PatchApproval { id, decision }, + }; + out( + "submitting patch approval", + MessagePriority::UserMessage, + MessageActor::Agent, + ); + codex.submit(sub).await?; + } + protocol::EventMsg::ExecCommandBegin { + command, + cwd, + call_id: _, + } => { + out( + &format!("running command: '{}' in '{}'", command.join(" "), cwd), + MessagePriority::BackgroundEvent, + MessageActor::Agent, + ); + } + protocol::EventMsg::ExecCommandEnd { + stdout, + stderr, + exit_code, + call_id: _, + } => { + let msg = if exit_code == 0 { + "command completed (exit 0)".to_string() + } else { + // Prefer stderr but fall back to stdout if empty. + let err_snippet = if !stderr.trim().is_empty() { + stderr.trim() + } else { + stdout.trim() + }; + format!("command failed (exit {exit_code}): {err_snippet}") + }; + out(&msg, MessagePriority::BackgroundEvent, MessageActor::Agent); + out( + "sending results to model", + MessagePriority::TaskProgress, + MessageActor::Agent, + ); + } + protocol::EventMsg::PatchApplyBegin { changes, .. } => { + // Emit PatchApplyBegin so the front‑end can show progress. + let summary = changes + .iter() + .map(|(path, change)| match change { + FileChange::Add { .. } => format!("A {}", path.display()), + FileChange::Delete => format!("D {}", path.display()), + FileChange::Update { .. } => format!("M {}", path.display()), + }) + .collect::>() + .join(", "); + + out( + &format!("applying patch: {summary}"), + MessagePriority::BackgroundEvent, + MessageActor::Agent, + ); + } + protocol::EventMsg::PatchApplyEnd { success, .. 
} => { + let status = if success { "success" } else { "failed" }; + out( + &format!("patch application {status}"), + MessagePriority::BackgroundEvent, + MessageActor::Agent, + ); + out( + "sending results to model", + MessagePriority::TaskProgress, + MessageActor::Agent, + ); + } + // Broad fallback; if the CLI is unaware of an event type, it will just + // print it as a generic BackgroundEvent. + e => { + out( + &format!("event: {e:?}"), + MessagePriority::BackgroundEvent, + MessageActor::Agent, + ); + } + } + } + } +} + +fn random_id() -> String { + let id: u64 = rand::random(); + id.to_string() +} + +fn request_user_approval2(request: String) -> anyhow::Result { + println!("{}", request); + + let mut line = String::new(); + stdin().read_line(&mut line)?; + let answer = line.trim().to_ascii_lowercase(); + let is_accepted = answer == "y" || answer == "yes"; + let decision = if is_accepted { + protocol::ReviewDecision::Approved + } else { + protocol::ReviewDecision::Denied + }; + Ok(decision) +} + +#[derive(Debug, Clone, Copy)] +enum MessagePriority { + BackgroundEvent, + TaskProgress, + UserMessage, +} +enum MessageActor { + Agent, + User, +} + +impl From for String { + fn from(actor: MessageActor) -> Self { + match actor { + MessageActor::Agent => "codex".to_string(), + MessageActor::User => "user".to_string(), + } + } +} + +fn out(msg: &str, priority: MessagePriority, actor: MessageActor) { + let actor: String = actor.into(); + let style = match priority { + MessagePriority::BackgroundEvent => Style::new().fg_rgb::<127, 127, 127>(), + MessagePriority::TaskProgress => Style::new().fg_rgb::<200, 200, 200>(), + MessagePriority::UserMessage => Style::new().white(), + }; + + println!("{}> {}", actor.bold(), msg.style(style)); +} + +struct InputReader { + reader: Lines>, + ctrl_c: Arc, +} + +impl InputReader { + pub fn new(ctrl_c: Arc) -> Self { + Self { + reader: BufReader::new(tokio::io::stdin()).lines(), + ctrl_c, + } + } + + pub async fn request_input(&mut self) -> std::io::Result> { + print!("user> "); + stdout().flush()?; + let interrupted = self.ctrl_c.notified(); + tokio::select! { + line = self.reader.next_line() => { + match line? { + Some(input) => Ok(Some(input.trim().to_string())), + None => Ok(None), + } + } + _ = interrupted => { + println!(); + Ok(Some(String::new())) + } + } + } +} diff --git a/codex-rs/repl/src/main.rs b/codex-rs/repl/src/main.rs new file mode 100644 index 00000000..f6920794 --- /dev/null +++ b/codex-rs/repl/src/main.rs @@ -0,0 +1,11 @@ +use clap::Parser; +use codex_repl::run_main; +use codex_repl::Cli; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = Cli::parse(); + run_main(cli).await?; + + Ok(()) +} diff --git a/codex-rs/rustfmt.toml b/codex-rs/rustfmt.toml new file mode 100644 index 00000000..8d5c7406 --- /dev/null +++ b/codex-rs/rustfmt.toml @@ -0,0 +1,4 @@ +edition = "2021" +# The warnings caused by this setting can be ignored. +# See https://github.com/openai/openai/pull/298039 for details. 
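+# `Item` puts each imported item in its own `use` statement. It is currently an
+# unstable rustfmt option, which is what produces those warnings on a stable toolchain.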
+imports_granularity = "Item" diff --git a/codex-rs/tui/Cargo.toml b/codex-rs/tui/Cargo.toml new file mode 100644 index 00000000..ff7a50f6 --- /dev/null +++ b/codex-rs/tui/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "codex-tui" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "codex-tui" +path = "src/main.rs" + +[lib] +name = "codex_tui" +path = "src/lib.rs" + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-ansi-escape = { path = "../ansi-escape" } +codex-core = { path = "../core", features = ["cli"] } +color-eyre = "0.6.3" +crossterm = "0.28.1" +ratatui = { version = "0.29.0", features = [ + "unstable-widget-ref", + "unstable-rendered-line-info", +] } +shlex = "1.3.0" +tokio = { version = "1", features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tracing = { version = "0.1.41", features = ["log"] } +tracing-appender = "0.2.3" +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +tui-input = "0.11.1" +tui-textarea = "0.7.0" diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs new file mode 100644 index 00000000..9aba46ec --- /dev/null +++ b/codex-rs/tui/src/app.rs @@ -0,0 +1,194 @@ +use crate::app_event::AppEvent; +use crate::chatwidget::ChatWidget; +use crate::git_warning_screen::GitWarningOutcome; +use crate::git_warning_screen::GitWarningScreen; +use crate::tui; +use codex_core::protocol::AskForApproval; +use codex_core::protocol::Event; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use color_eyre::eyre::Result; +use crossterm::event::KeyCode; +use crossterm::event::KeyEvent; +use std::sync::mpsc::channel; +use std::sync::mpsc::Receiver; +use std::sync::mpsc::Sender; + +/// Top‑level application state – which full‑screen view is currently active. +enum AppState { + /// The main chat UI is visible. + Chat, + /// The start‑up warning that recommends running codex inside a Git repo. + GitWarning { screen: GitWarningScreen }, +} + +pub(crate) struct App<'a> { + app_event_tx: Sender, + app_event_rx: Receiver, + chat_widget: ChatWidget<'a>, + app_state: AppState, +} + +impl App<'_> { + pub(crate) fn new( + approval_policy: AskForApproval, + sandbox_policy: SandboxPolicy, + initial_prompt: Option, + show_git_warning: bool, + initial_images: Vec, + model: Option, + ) -> Self { + let (app_event_tx, app_event_rx) = channel(); + + // Spawn a dedicated thread for reading the crossterm event loop and + // re-publishing the events as AppEvents, as appropriate. 
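+        // (`crossterm::event::read()` blocks, which is why this loop runs on
+        // its own OS thread rather than inside the async runtime.)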
+ { + let app_event_tx = app_event_tx.clone(); + std::thread::spawn(move || { + while let Ok(event) = crossterm::event::read() { + let app_event = match event { + crossterm::event::Event::Key(key_event) => AppEvent::KeyEvent(key_event), + crossterm::event::Event::Resize(_, _) => AppEvent::Redraw, + crossterm::event::Event::FocusGained + | crossterm::event::Event::FocusLost + | crossterm::event::Event::Mouse(_) + | crossterm::event::Event::Paste(_) => { + continue; + } + }; + if let Err(e) = app_event_tx.send(app_event) { + tracing::error!("failed to send event: {e}"); + } + } + }); + } + + let chat_widget = ChatWidget::new( + approval_policy, + sandbox_policy, + app_event_tx.clone(), + initial_prompt.clone(), + initial_images, + model, + ); + + let app_state = if show_git_warning { + AppState::GitWarning { + screen: GitWarningScreen::new(), + } + } else { + AppState::Chat + }; + + Self { + app_event_tx, + app_event_rx, + chat_widget, + app_state, + } + } + + /// Clone of the internal event sender so external tasks (e.g. log bridge) + /// can inject `AppEvent`s. + pub fn event_sender(&self) -> Sender { + self.app_event_tx.clone() + } + + pub(crate) fn run(&mut self, terminal: &mut tui::Tui) -> Result<()> { + // Insert an event to trigger the first render. + let app_event_tx = self.app_event_tx.clone(); + app_event_tx.send(AppEvent::Redraw).unwrap(); + + while let Ok(event) = self.app_event_rx.recv() { + match event { + AppEvent::Redraw => { + self.draw_next_frame(terminal)?; + } + AppEvent::KeyEvent(key_event) => { + match key_event { + KeyEvent { + code: KeyCode::Char('c'), + modifiers: crossterm::event::KeyModifiers::CONTROL, + .. + } => { + self.chat_widget.submit_op(Op::Interrupt); + } + KeyEvent { + code: KeyCode::Char('d'), + modifiers: crossterm::event::KeyModifiers::CONTROL, + .. + } => { + self.app_event_tx.send(AppEvent::ExitRequest).unwrap(); + } + _ => { + self.dispatch_key_event(key_event); + } + }; + } + AppEvent::CodexEvent(event) => { + self.dispatch_codex_event(event); + } + AppEvent::ExitRequest => { + break; + } + AppEvent::CodexOp(op) => { + if matches!(self.app_state, AppState::Chat) { + self.chat_widget.submit_op(op); + } + } + AppEvent::LatestLog(line) => { + if matches!(self.app_state, AppState::Chat) { + let _ = self.chat_widget.update_latest_log(line); + } + } + } + } + terminal.clear()?; + + Ok(()) + } + + fn draw_next_frame(&mut self, terminal: &mut tui::Tui) -> Result<()> { + match &mut self.app_state { + AppState::Chat => { + terminal.draw(|frame| frame.render_widget_ref(&self.chat_widget, frame.area()))?; + } + AppState::GitWarning { screen } => { + terminal.draw(|frame| frame.render_widget_ref(&*screen, frame.area()))?; + } + } + Ok(()) + } + + /// Dispatch a KeyEvent to the current view and let it decide what to do + /// with it. 
+ fn dispatch_key_event(&mut self, key_event: KeyEvent) { + match &mut self.app_state { + AppState::Chat => { + if let Err(e) = self.chat_widget.handle_key_event(key_event) { + tracing::error!("SendError: {e}"); + } + } + AppState::GitWarning { screen } => match screen.handle_key_event(key_event) { + GitWarningOutcome::Continue => { + self.app_state = AppState::Chat; + let _ = self.app_event_tx.send(AppEvent::Redraw); + } + GitWarningOutcome::Quit => { + let _ = self.app_event_tx.send(AppEvent::ExitRequest); + } + GitWarningOutcome::None => { + // do nothing + } + }, + } + } + + fn dispatch_codex_event(&mut self, event: Event) { + if matches!(self.app_state, AppState::Chat) { + if let Err(e) = self.chat_widget.handle_codex_event(event) { + tracing::error!("SendError: {e}"); + } + } + } +} diff --git a/codex-rs/tui/src/app_event.rs b/codex-rs/tui/src/app_event.rs new file mode 100644 index 00000000..bb8efb8e --- /dev/null +++ b/codex-rs/tui/src/app_event.rs @@ -0,0 +1,17 @@ +use codex_core::protocol::Event; +use crossterm::event::KeyEvent; + +pub(crate) enum AppEvent { + CodexEvent(Event), + Redraw, + KeyEvent(KeyEvent), + /// Request to exit the application gracefully. + ExitRequest, + + /// Forward an `Op` to the Agent. Using an `AppEvent` for this avoids + /// bubbling channels through layers of widgets. + CodexOp(codex_core::protocol::Op), + + /// Latest formatted log line emitted by `tracing`. + LatestLog(String), +} diff --git a/codex-rs/tui/src/bottom_pane.rs b/codex-rs/tui/src/bottom_pane.rs new file mode 100644 index 00000000..f2ebeaf2 --- /dev/null +++ b/codex-rs/tui/src/bottom_pane.rs @@ -0,0 +1,303 @@ +//! Bottom pane widget for the chat UI. +//! +//! This widget owns everything that is rendered in the terminal's lower +//! portion: either the multiline [`TextArea`] for user input or an active +//! [`UserApprovalWidget`] modal. All state and key-handling logic that is +//! specific to those UI elements lives here so that the parent +//! [`ChatWidget`] only has to forward events and render calls. + +use std::sync::mpsc::SendError; +use std::sync::mpsc::Sender; + +use crossterm::event::KeyEvent; +use ratatui::buffer::Buffer; +use ratatui::layout::Alignment; +use ratatui::layout::Rect; +use ratatui::style::Style; +use ratatui::style::Stylize; +use ratatui::text::Line; +use ratatui::widgets::BorderType; +use ratatui::widgets::Widget; +use ratatui::widgets::WidgetRef; +use tui_textarea::Input; +use tui_textarea::Key; +use tui_textarea::TextArea; + +use crate::app_event::AppEvent; +use crate::status_indicator_widget::StatusIndicatorWidget; +use crate::user_approval_widget::ApprovalRequest; +use crate::user_approval_widget::UserApprovalWidget; + +/// Minimum number of visible text rows inside the textarea. +const MIN_TEXTAREA_ROWS: usize = 3; +/// Number of terminal rows consumed by the textarea border (top + bottom). +const TEXTAREA_BORDER_LINES: u16 = 2; + +/// Result returned by [`BottomPane::handle_key_event`]. +pub enum InputResult { + /// The user pressed - the contained string is the message that + /// should be forwarded to the agent and appended to the conversation + /// history. + Submitted(String), + None, +} + +/// Internal state of the bottom pane. +/// +/// `ApprovalModal` owns a `current` widget that is guaranteed to exist while +/// this variant is active. Additional queued modals are stored in `queue`. 
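+/// `StatusIndicator` is shown while a task is running and no modal is active;
+/// `TextInput` is the idle state in which the user can type a message.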
+enum PaneState<'a> { + StatusIndicator { + view: StatusIndicatorWidget, + }, + TextInput, + ApprovalModal { + current: UserApprovalWidget<'a>, + queue: Vec>, + }, +} + +/// Everything that is drawn in the lower half of the chat UI. +pub(crate) struct BottomPane<'a> { + /// Multiline input widget (always kept around so its history/yank buffer + /// is preserved even while a modal is open). + textarea: TextArea<'a>, + + /// Current state (text input vs. approval modal). + state: PaneState<'a>, + + /// Channel used to notify the application that a redraw is required. + app_event_tx: Sender, + + has_input_focus: bool, + + is_task_running: bool, +} + +pub(crate) struct BottomPaneParams { + pub(crate) app_event_tx: Sender, + pub(crate) has_input_focus: bool, +} + +impl BottomPane<'_> { + pub fn new( + BottomPaneParams { + app_event_tx, + has_input_focus, + }: BottomPaneParams, + ) -> Self { + let mut textarea = TextArea::default(); + textarea.set_placeholder_text("send a message"); + textarea.set_cursor_line_style(Style::default()); + update_border_for_input_focus(&mut textarea, has_input_focus); + + Self { + textarea, + state: PaneState::TextInput, + app_event_tx, + has_input_focus, + is_task_running: false, + } + } + + /// Update the status indicator with the latest log line. Only effective + /// when the pane is currently in `StatusIndicator` mode. + pub(crate) fn update_status_text(&mut self, text: String) -> Result<(), SendError> { + if let PaneState::StatusIndicator { view } = &mut self.state { + view.update_text(text); + self.request_redraw()?; + } + Ok(()) + } + + pub(crate) fn set_input_focus(&mut self, has_input_focus: bool) { + self.has_input_focus = has_input_focus; + update_border_for_input_focus(&mut self.textarea, has_input_focus); + } + + /// Forward a key event to the appropriate child widget. + pub fn handle_key_event( + &mut self, + key_event: KeyEvent, + ) -> Result> { + match &mut self.state { + PaneState::StatusIndicator { view } => { + if view.handle_key_event(key_event)? { + self.request_redraw()?; + } + Ok(InputResult::None) + } + PaneState::ApprovalModal { current, queue } => { + // While in modal mode we always consume the Event. + current.handle_key_event(key_event)?; + + // If the modal has finished, either advance to the next one + // in the queue or fall back to the textarea. + if current.is_complete() { + if !queue.is_empty() { + // Replace `current` with the first queued modal and + // drop the old value. + *current = queue.remove(0); + } else if self.is_task_running { + let desired_height = { + let text_rows = self.textarea.lines().len().max(MIN_TEXTAREA_ROWS); + text_rows as u16 + TEXTAREA_BORDER_LINES + }; + + self.state = PaneState::StatusIndicator { + view: StatusIndicatorWidget::new( + self.app_event_tx.clone(), + desired_height, + ), + }; + } else { + self.state = PaneState::TextInput; + } + } + + // Always request a redraw while a modal is up to ensure the + // UI stays responsive. + self.request_redraw()?; + Ok(InputResult::None) + } + PaneState::TextInput => { + match key_event.into() { + Input { + key: Key::Enter, + shift: false, + alt: false, + ctrl: false, + } => { + let text = self.textarea.lines().join("\n"); + // Clear the textarea (there is no dedicated clear API). 
+ self.textarea.select_all(); + self.textarea.cut(); + self.request_redraw()?; + Ok(InputResult::Submitted(text)) + } + input => { + self.textarea.input(input); + self.request_redraw()?; + Ok(InputResult::None) + } + } + } + } + } + + pub fn set_task_running(&mut self, is_task_running: bool) -> Result<(), SendError> { + self.is_task_running = is_task_running; + + match self.state { + PaneState::TextInput => { + if is_task_running { + self.state = PaneState::StatusIndicator { + view: StatusIndicatorWidget::new(self.app_event_tx.clone(), { + let text_rows = + self.textarea.lines().len().max(MIN_TEXTAREA_ROWS) as u16; + text_rows + TEXTAREA_BORDER_LINES + }), + }; + } else { + return Ok(()); + } + } + PaneState::StatusIndicator { .. } => { + if is_task_running { + return Ok(()); + } else { + self.state = PaneState::TextInput; + } + } + PaneState::ApprovalModal { .. } => { + // Do not change state if a modal is showing. + return Ok(()); + } + } + + self.request_redraw()?; + Ok(()) + } + + /// Enqueue a new approval request coming from the agent. + /// + /// Returns `true` when this is the *first* modal - in that case the caller + /// should trigger a redraw so that the modal becomes visible. + pub fn push_approval_request(&mut self, request: ApprovalRequest) -> bool { + let widget = UserApprovalWidget::new(request, self.app_event_tx.clone()); + + match &mut self.state { + PaneState::StatusIndicator { .. } => { + self.state = PaneState::ApprovalModal { + current: widget, + queue: Vec::new(), + }; + true // Needs redraw so the modal appears. + } + PaneState::TextInput => { + // Transition to modal state with an empty queue. + self.state = PaneState::ApprovalModal { + current: widget, + queue: Vec::new(), + }; + true // Needs redraw so the modal appears. + } + PaneState::ApprovalModal { queue, .. } => { + queue.push(widget); + false // Already in modal mode - no redraw required. + } + } + } + + fn request_redraw(&self) -> Result<(), SendError> { + self.app_event_tx.send(AppEvent::Redraw) + } + + /// Height (terminal rows) required to render the pane in its current + /// state (modal or textarea). + pub fn required_height(&self, area: &Rect) -> u16 { + match &self.state { + PaneState::StatusIndicator { view } => view.get_height(), + PaneState::ApprovalModal { current, .. } => current.get_height(area), + PaneState::TextInput => { + let text_rows = self.textarea.lines().len(); + std::cmp::max(text_rows, MIN_TEXTAREA_ROWS) as u16 + TEXTAREA_BORDER_LINES + } + } + } +} + +impl WidgetRef for &BottomPane<'_> { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + match &self.state { + PaneState::StatusIndicator { view } => view.render_ref(area, buf), + PaneState::ApprovalModal { current, .. 
} => current.render(area, buf), + PaneState::TextInput => self.textarea.render(area, buf), + } + } +} + +fn update_border_for_input_focus(textarea: &mut TextArea, has_input_focus: bool) { + let (title, border_style) = if has_input_focus { + ( + "use Enter to send for now (Ctrl‑D to quit)", + Style::default().dim(), + ) + } else { + ("", Style::default()) + }; + let right_title = if has_input_focus { + Line::from("press enter to send").alignment(Alignment::Right) + } else { + Line::from("") + }; + + textarea.set_block( + ratatui::widgets::Block::default() + .title_bottom(title) + .title_bottom(right_title) + .borders(ratatui::widgets::Borders::ALL) + .border_type(BorderType::Rounded) + .border_style(border_style), + ); +} diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs new file mode 100644 index 00000000..149cea42 --- /dev/null +++ b/codex-rs/tui/src/chatwidget.rs @@ -0,0 +1,387 @@ +use std::sync::mpsc::SendError; +use std::sync::mpsc::Sender; +use std::sync::Arc; + +use codex_core::codex_wrapper::init_codex; +use codex_core::protocol::AskForApproval; +use codex_core::protocol::Event; +use codex_core::protocol::EventMsg; +use codex_core::protocol::InputItem; +use codex_core::protocol::Op; +use codex_core::protocol::SandboxPolicy; +use crossterm::event::KeyEvent; +use ratatui::buffer::Buffer; +use ratatui::layout::Constraint; +use ratatui::layout::Direction; +use ratatui::layout::Layout; +use ratatui::layout::Rect; +use ratatui::widgets::Widget; +use ratatui::widgets::WidgetRef; +use tokio::sync::mpsc::unbounded_channel; +use tokio::sync::mpsc::UnboundedSender; + +use crate::app_event::AppEvent; +use crate::bottom_pane::BottomPane; +use crate::bottom_pane::BottomPaneParams; +use crate::bottom_pane::InputResult; +use crate::conversation_history_widget::ConversationHistoryWidget; +use crate::history_cell::PatchEventType; +use crate::user_approval_widget::ApprovalRequest; + +pub(crate) struct ChatWidget<'a> { + app_event_tx: Sender, + codex_op_tx: UnboundedSender, + conversation_history: ConversationHistoryWidget, + bottom_pane: BottomPane<'a>, + input_focus: InputFocus, + approval_policy: AskForApproval, + cwd: std::path::PathBuf, +} + +#[derive(Clone, Copy, Eq, PartialEq)] +enum InputFocus { + HistoryPane, + BottomPane, +} + +impl ChatWidget<'_> { + pub(crate) fn new( + approval_policy: AskForApproval, + sandbox_policy: SandboxPolicy, + app_event_tx: Sender, + initial_prompt: Option, + initial_images: Vec, + model: Option, + ) -> Self { + let (codex_op_tx, mut codex_op_rx) = unbounded_channel::(); + + // Determine the current working directory up‑front so we can display + // it alongside the Session information when the session is + // initialised. + let cwd = std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from(".")); + + let app_event_tx_clone = app_event_tx.clone(); + // Create the Codex asynchronously so the UI loads as quickly as possible. + tokio::spawn(async move { + let (codex, session_event, _ctrl_c) = + match init_codex(approval_policy, sandbox_policy, model).await { + Ok(vals) => vals, + Err(e) => { + // TODO(mbolin): This error needs to be surfaced to the user. + tracing::error!("failed to initialize codex: {e}"); + return; + } + }; + + // Forward the captured `SessionInitialized` event that was consumed + // inside `init_codex()` so it can be rendered in the UI. 
+ if let Err(e) = app_event_tx_clone.send(AppEvent::CodexEvent(session_event.clone())) { + tracing::error!("failed to send SessionInitialized event: {e}"); + } + let codex = Arc::new(codex); + let codex_clone = codex.clone(); + tokio::spawn(async move { + while let Some(op) = codex_op_rx.recv().await { + let id = codex_clone.submit(op).await; + if let Err(e) = id { + tracing::error!("failed to submit op: {e}"); + } + } + }); + + while let Ok(event) = codex.next_event().await { + app_event_tx_clone + .send(AppEvent::CodexEvent(event)) + .unwrap_or_else(|e| { + tracing::error!("failed to send event: {e}"); + }); + } + }); + + let mut chat_widget = Self { + app_event_tx: app_event_tx.clone(), + codex_op_tx, + conversation_history: ConversationHistoryWidget::new(), + bottom_pane: BottomPane::new(BottomPaneParams { + app_event_tx, + has_input_focus: true, + }), + input_focus: InputFocus::BottomPane, + approval_policy, + cwd: cwd.clone(), + }; + + let _ = chat_widget.submit_welcome_message(); + + if initial_prompt.is_some() || !initial_images.is_empty() { + let text = initial_prompt.unwrap_or_default(); + let _ = chat_widget.submit_user_message_with_images(text, initial_images); + } + + chat_widget + } + + pub(crate) fn handle_key_event( + &mut self, + key_event: KeyEvent, + ) -> std::result::Result<(), SendError> { + // Special-case : does not get dispatched to child components. + if matches!(key_event.code, crossterm::event::KeyCode::Tab) { + self.input_focus = match self.input_focus { + InputFocus::HistoryPane => InputFocus::BottomPane, + InputFocus::BottomPane => InputFocus::HistoryPane, + }; + self.conversation_history + .set_input_focus(self.input_focus == InputFocus::HistoryPane); + self.bottom_pane + .set_input_focus(self.input_focus == InputFocus::BottomPane); + self.request_redraw()?; + return Ok(()); + } + + match self.input_focus { + InputFocus::HistoryPane => { + let needs_redraw = self.conversation_history.handle_key_event(key_event); + if needs_redraw { + self.request_redraw()?; + } + Ok(()) + } + InputFocus::BottomPane => { + match self.bottom_pane.handle_key_event(key_event)? { + InputResult::Submitted(text) => { + // Special client‑side commands start with a leading slash. + let trimmed = text.trim(); + + match trimmed { + "q" => { + // Gracefully request application shutdown. + let _ = self.app_event_tx.send(AppEvent::ExitRequest); + } + "/clear" => { + // Clear the current conversation history without exiting. + self.conversation_history.clear(); + self.request_redraw()?; + } + _ => { + self.submit_user_message(text)?; + } + } + } + InputResult::None => {} + } + Ok(()) + } + } + } + + fn submit_welcome_message(&mut self) -> std::result::Result<(), SendError> { + self.handle_codex_event(Event { + id: "welcome".to_string(), + msg: EventMsg::AgentMessage { + message: "Welcome to codex!".to_string(), + }, + })?; + Ok(()) + } + + fn submit_user_message( + &mut self, + text: String, + ) -> std::result::Result<(), SendError> { + // Forward to codex and update conversation history. 
+ self.submit_user_message_with_images(text, vec![]) + } + + fn submit_user_message_with_images( + &mut self, + text: String, + image_paths: Vec, + ) -> std::result::Result<(), SendError> { + let mut items: Vec = Vec::new(); + + if !text.is_empty() { + items.push(InputItem::Text { text: text.clone() }); + } + + for path in image_paths { + items.push(InputItem::LocalImage { path }); + } + + if items.is_empty() { + return Ok(()); + } + + self.codex_op_tx + .send(Op::UserInput { items }) + .unwrap_or_else(|e| { + tracing::error!("failed to send message: {e}"); + }); + + // Only show text portion in conversation history for now. + if !text.is_empty() { + self.conversation_history.add_user_message(text); + } + self.conversation_history.scroll_to_bottom(); + + Ok(()) + } + + pub(crate) fn handle_codex_event( + &mut self, + event: Event, + ) -> std::result::Result<(), SendError> { + let Event { id, msg } = event; + match msg { + EventMsg::SessionConfigured { model } => { + // Record session information at the top of the conversation. + self.conversation_history.add_session_info( + model, + self.cwd.clone(), + self.approval_policy, + ); + self.request_redraw()?; + } + EventMsg::AgentMessage { message } => { + self.conversation_history.add_agent_message(message); + self.request_redraw()?; + } + EventMsg::TaskStarted => { + self.bottom_pane.set_task_running(true)?; + self.conversation_history + .add_background_event(format!("task {id} started")); + self.request_redraw()?; + } + EventMsg::TaskComplete => { + self.bottom_pane.set_task_running(false)?; + self.request_redraw()?; + } + EventMsg::Error { message } => { + self.conversation_history + .add_background_event(format!("Error: {message}")); + self.bottom_pane.set_task_running(false)?; + } + EventMsg::ExecApprovalRequest { + command, + cwd, + reason, + } => { + let request = ApprovalRequest::Exec { + id, + command, + cwd, + reason, + }; + let needs_redraw = self.bottom_pane.push_approval_request(request); + if needs_redraw { + self.request_redraw()?; + } + } + EventMsg::ApplyPatchApprovalRequest { + changes, + reason, + grant_root, + } => { + // ------------------------------------------------------------------ + // Before we even prompt the user for approval we surface the patch + // summary in the main conversation so that the dialog appears in a + // sensible chronological order: + // (1) codex → proposes patch (HistoryCell::PendingPatch) + // (2) UI → asks for approval (BottomPane) + // This mirrors how command execution is shown (command begins → + // approval dialog) and avoids surprising the user with a modal + // prompt before they have seen *what* is being requested. + // ------------------------------------------------------------------ + + self.conversation_history + .add_patch_event(PatchEventType::ApprovalRequest, changes); + + self.conversation_history.scroll_to_bottom(); + + // Now surface the approval request in the BottomPane as before. + let request = ApprovalRequest::ApplyPatch { + id, + reason, + grant_root, + }; + let _needs_redraw = self.bottom_pane.push_approval_request(request); + // Redraw is always need because the history has changed. + self.request_redraw()?; + } + EventMsg::ExecCommandBegin { + call_id, command, .. + } => { + self.conversation_history + .add_active_exec_command(call_id, command); + self.request_redraw()?; + } + EventMsg::PatchApplyBegin { + call_id: _, + auto_approved, + changes, + } => { + // Even when a patch is auto‑approved we still display the + // summary so the user can follow along. 
+ self.conversation_history + .add_patch_event(PatchEventType::ApplyBegin { auto_approved }, changes); + if !auto_approved { + self.conversation_history.scroll_to_bottom(); + } + self.request_redraw()?; + } + EventMsg::ExecCommandEnd { + call_id, + exit_code, + stdout, + stderr, + .. + } => { + self.conversation_history + .record_completed_exec_command(call_id, stdout, stderr, exit_code); + self.request_redraw()?; + } + event => { + self.conversation_history + .add_background_event(format!("{event:?}")); + self.request_redraw()?; + } + } + Ok(()) + } + + /// Update the live log preview while a task is running. + pub(crate) fn update_latest_log( + &mut self, + line: String, + ) -> std::result::Result<(), std::sync::mpsc::SendError> { + // Forward only if we are currently showing the status indicator. + self.bottom_pane.update_status_text(line)?; + Ok(()) + } + + fn request_redraw(&mut self) -> std::result::Result<(), SendError> { + self.app_event_tx.send(AppEvent::Redraw)?; + Ok(()) + } + + /// Forward an `Op` directly to codex. + pub(crate) fn submit_op(&self, op: Op) { + if let Err(e) = self.codex_op_tx.send(op) { + tracing::error!("failed to submit op: {e}"); + } + } +} + +impl WidgetRef for &ChatWidget<'_> { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + let bottom_height = self.bottom_pane.required_height(&area); + + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Min(0), Constraint::Length(bottom_height)]) + .split(area); + + self.conversation_history.render(chunks[0], buf); + (&self.bottom_pane).render(chunks[1], buf); + } +} diff --git a/codex-rs/tui/src/cli.rs b/codex-rs/tui/src/cli.rs new file mode 100644 index 00000000..fa764d1a --- /dev/null +++ b/codex-rs/tui/src/cli.rs @@ -0,0 +1,41 @@ +use clap::Parser; +use codex_core::ApprovalModeCliArg; +use codex_core::SandboxModeCliArg; +use std::path::PathBuf; + +#[derive(Parser, Debug)] +#[command(version)] +pub struct Cli { + /// Optional user prompt to start the session. + pub prompt: Option, + + /// Optional image(s) to attach to the initial prompt. + #[arg(long = "image", short = 'i', value_name = "FILE", value_delimiter = ',', num_args = 1..)] + pub images: Vec, + + /// Model the agent should use. + #[arg(long, short = 'm')] + pub model: Option, + + /// Configure when the model requires human approval before executing a command. + #[arg(long = "ask-for-approval", short = 'a', value_enum, default_value_t = ApprovalModeCliArg::OnFailure)] + pub approval_policy: ApprovalModeCliArg, + + /// Configure the process restrictions when a command is executed. + /// + /// Uses OS-specific sandboxing tools; Seatbelt on OSX, landlock+seccomp on Linux. + #[arg(long = "sandbox", short = 's', value_enum, default_value_t = SandboxModeCliArg::NetworkAndFileWriteRestricted)] + pub sandbox_policy: SandboxModeCliArg, + + /// Allow running Codex outside a Git repository. 
+ #[arg(long = "skip-git-repo-check", default_value_t = false)] + pub skip_git_repo_check: bool, + + /// Convenience alias for low-friction sandboxed automatic execution (-a on-failure, -s network-and-file-write-restricted) + #[arg(long = "full-auto", default_value_t = true)] + pub full_auto: bool, + + /// Convenience alias for supervised sandboxed execution (-a unless-allow-listed, -s network-and-file-write-restricted) + #[arg(long = "suggest", default_value_t = false)] + pub suggest: bool, +} diff --git a/codex-rs/tui/src/conversation_history_widget.rs b/codex-rs/tui/src/conversation_history_widget.rs new file mode 100644 index 00000000..c8f69061 --- /dev/null +++ b/codex-rs/tui/src/conversation_history_widget.rs @@ -0,0 +1,379 @@ +use crate::history_cell::CommandOutput; +use crate::history_cell::HistoryCell; +use crate::history_cell::PatchEventType; +use codex_core::protocol::FileChange; +use crossterm::event::KeyCode; +use crossterm::event::KeyEvent; +use ratatui::prelude::*; +use ratatui::style::Style; +use ratatui::widgets::*; +use std::cell::Cell as StdCell; +use std::collections::HashMap; +use std::path::PathBuf; + +pub struct ConversationHistoryWidget { + history: Vec, + scroll_position: usize, + /// Number of lines the last time render_ref() was called + num_rendered_lines: StdCell, + /// The height of the viewport last time render_ref() was called + last_viewport_height: StdCell, + has_input_focus: bool, +} + +impl ConversationHistoryWidget { + pub fn new() -> Self { + Self { + history: Vec::new(), + scroll_position: usize::MAX, + num_rendered_lines: StdCell::new(0), + last_viewport_height: StdCell::new(0), + has_input_focus: false, + } + } + + pub(crate) fn set_input_focus(&mut self, has_input_focus: bool) { + self.has_input_focus = has_input_focus; + } + + /// Returns true if it needs a redraw. + pub(crate) fn handle_key_event(&mut self, key_event: KeyEvent) -> bool { + match key_event.code { + KeyCode::Up | KeyCode::Char('k') => { + self.scroll_up(); + true + } + KeyCode::Down | KeyCode::Char('j') => { + self.scroll_down(); + true + } + KeyCode::PageUp | KeyCode::Char('b') | KeyCode::Char('u') | KeyCode::Char('U') => { + self.scroll_page_up(); + true + } + KeyCode::PageDown | KeyCode::Char(' ') | KeyCode::Char('d') | KeyCode::Char('D') => { + self.scroll_page_down(); + true + } + _ => false, + } + } + + fn scroll_up(&mut self) { + // If a user is scrolling up from the "stick to bottom" mode, we + // need to scroll them back such that they move just one line up. + // This requires us to care about how tall the screen is. + if self.scroll_position == usize::MAX { + self.scroll_position = self + .num_rendered_lines + .get() + .saturating_sub(self.last_viewport_height.get()); + } + + self.scroll_position = self.scroll_position.saturating_sub(1); + } + + fn scroll_down(&mut self) { + // If we're already pinned to the bottom there's nothing to do. + if self.scroll_position == usize::MAX { + return; + } + + let viewport_height = self.last_viewport_height.get().max(1); + let num_lines = self.num_rendered_lines.get(); + + // Compute the maximum explicit scroll offset that still shows a full + // viewport. This mirrors the calculation in `scroll_page_down()` and + // in the render path. 
+ let max_scroll = num_lines.saturating_sub(viewport_height).saturating_add(1); + + let new_pos = self.scroll_position.saturating_add(1); + + if new_pos >= max_scroll { + // Reached (or passed) the bottom – switch to stick‑to‑bottom mode + // so that additional output keeps the view pinned automatically. + self.scroll_position = usize::MAX; + } else { + self.scroll_position = new_pos; + } + } + + /// Scroll up by one full viewport height (Page Up). + fn scroll_page_up(&mut self) { + let viewport_height = self.last_viewport_height.get().max(1); + + // If we are currently in the "stick to bottom" mode, first convert the + // implicit scroll position (`usize::MAX`) into an explicit offset that + // represents the very bottom of the scroll region. This mirrors the + // logic from `scroll_up()`. + if self.scroll_position == usize::MAX { + self.scroll_position = self + .num_rendered_lines + .get() + .saturating_sub(viewport_height); + } + + // Move up by a full page. + self.scroll_position = self.scroll_position.saturating_sub(viewport_height); + } + + /// Scroll down by one full viewport height (Page Down). + fn scroll_page_down(&mut self) { + // Nothing to do if we're already stuck to the bottom. + if self.scroll_position == usize::MAX { + return; + } + + let viewport_height = self.last_viewport_height.get().max(1); + let num_lines = self.num_rendered_lines.get(); + + // Calculate the maximum explicit scroll offset that is still within + // range. This matches the logic in `scroll_down()` and the render + // method. + let max_scroll = num_lines.saturating_sub(viewport_height).saturating_add(1); + + // Attempt to move down by a full page. + let new_pos = self.scroll_position.saturating_add(viewport_height); + + if new_pos >= max_scroll { + // We have reached (or passed) the bottom – switch back to + // automatic stick‑to‑bottom mode so that subsequent output keeps + // the viewport pinned. + self.scroll_position = usize::MAX; + } else { + self.scroll_position = new_pos; + } + } + + pub fn scroll_to_bottom(&mut self) { + self.scroll_position = usize::MAX; + } + + pub fn add_user_message(&mut self, message: String) { + self.add_to_history(HistoryCell::new_user_prompt(message)); + } + + pub fn add_agent_message(&mut self, message: String) { + self.add_to_history(HistoryCell::new_agent_message(message)); + } + + pub fn add_background_event(&mut self, message: String) { + self.add_to_history(HistoryCell::new_background_event(message)); + } + + /// Add a pending patch entry (before user approval). + pub fn add_patch_event( + &mut self, + event_type: PatchEventType, + changes: HashMap, + ) { + self.add_to_history(HistoryCell::new_patch_event(event_type, changes)); + } + + pub fn add_session_info( + &mut self, + model: String, + cwd: std::path::PathBuf, + approval_policy: codex_core::protocol::AskForApproval, + ) { + self.add_to_history(HistoryCell::new_session_info(model, cwd, approval_policy)); + } + + pub fn add_active_exec_command(&mut self, call_id: String, command: Vec) { + self.add_to_history(HistoryCell::new_active_exec_command(call_id, command)); + } + + fn add_to_history(&mut self, cell: HistoryCell) { + self.history.push(cell); + } + + /// Remove all history entries and reset scrolling. 
+ pub fn clear(&mut self) { + self.history.clear(); + self.scroll_position = usize::MAX; + } + + pub fn record_completed_exec_command( + &mut self, + call_id: String, + stdout: String, + stderr: String, + exit_code: i32, + ) { + for cell in self.history.iter_mut() { + if let HistoryCell::ActiveExecCommand { + call_id: history_id, + command, + start, + .. + } = cell + { + if &call_id == history_id { + *cell = HistoryCell::new_completed_exec_command( + command.clone(), + CommandOutput { + exit_code, + stdout, + stderr, + duration: start.elapsed(), + }, + ); + break; + } + } + } + } +} + +impl WidgetRef for ConversationHistoryWidget { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + let (title, border_style) = if self.has_input_focus { + ( + "Messages (↑/↓ or j/k = line, b/u = PgUp, space/d = PgDn)", + Style::default().fg(Color::LightYellow), + ) + } else { + ("Messages (tab to focus)", Style::default().dim()) + }; + + let block = Block::default() + .title(title) + .borders(Borders::ALL) + .border_type(BorderType::Rounded) + .border_style(border_style); + + // ------------------------------------------------------------------ + // Build a *window* into the history instead of cloning the entire + // history into a brand‑new Vec every time we are asked to render. + // + // There can be an unbounded number of `Line` objects in the history, + // but the terminal will only ever display `height` of them at once. + // By materialising only the `height` lines that are scrolled into + // view we avoid the potentially expensive clone of the full + // conversation every frame. + // ------------------------------------------------------------------ + + // Compute the inner area that will be available for the list after + // the surrounding `Block` is drawn. + let inner = block.inner(area); + let viewport_height = inner.height as usize; + + // Collect the lines that will actually be visible in the viewport + // while keeping track of the total number of lines so the scrollbar + // stays correct. + let num_lines: usize = self.history.iter().map(|c| c.lines().len()).sum(); + + let max_scroll = num_lines.saturating_sub(viewport_height) + 1; + let scroll_pos = if self.scroll_position == usize::MAX { + max_scroll + } else { + self.scroll_position.min(max_scroll) + }; + + let mut visible_lines: Vec> = Vec::with_capacity(viewport_height); + + if self.scroll_position == usize::MAX { + // Stick‑to‑bottom mode: walk the history backwards and keep the + // most recent `height` lines. This touches at most `height` + // lines regardless of how large the conversation grows. + 'outer_rev: for cell in self.history.iter().rev() { + for line in cell.lines().iter().rev() { + visible_lines.push(line.clone()); + if visible_lines.len() == viewport_height { + break 'outer_rev; + } + } + } + visible_lines.reverse(); + } else { + // Arbitrary scroll position. Skip lines until we reach the + // desired offset, then emit the next `height` lines. + let start_line = scroll_pos; + let mut current_index = 0usize; + 'outer_fwd: for cell in &self.history { + for line in cell.lines() { + if current_index >= start_line { + visible_lines.push(line.clone()); + if visible_lines.len() == viewport_height { + break 'outer_fwd; + } + } + current_index += 1; + } + } + } + + // We track the number of lines in the struct so can let the user take over from + // something other than usize::MAX when they start scrolling up. This could be + // removed once we have the vec in self. 
+ self.num_rendered_lines.set(num_lines); + self.last_viewport_height.set(viewport_height); + + // The widget takes care of drawing the `block` and computing its own + // inner area, so we render it over the full `area`. + // We *manually* sliced the set of `visible_lines` to fit within the + // viewport above, so there is no need to ask the `Paragraph` widget + // to apply an additional scroll offset. Doing so would cause the + // content to be shifted *twice* – once by our own logic and then a + // second time by the widget – which manifested as the entire block + // drifting off‑screen when the user attempted to scroll. + + let paragraph = Paragraph::new(visible_lines) + .block(block) + .wrap(Wrap { trim: false }); + paragraph.render(area, buf); + + let needs_scrollbar = num_lines > viewport_height; + if needs_scrollbar { + let mut scroll_state = ScrollbarState::default() + // TODO(ragona): + // I don't totally understand this, but it appears to work exactly as expected + // if we set the content length as the lines minus the height. Maybe I was supposed + // to use viewport_content_length or something, but this works and I'm backing away. + .content_length(num_lines.saturating_sub(viewport_height)) + .position(scroll_pos); + + // Choose a thumb colour that stands out only when this pane has focus so that the + // user’s attention is naturally drawn to the active viewport. When unfocused we show + // a low‑contrast thumb so the scrollbar fades into the background without becoming + // invisible. + + let thumb_style = if self.has_input_focus { + Style::reset().fg(Color::LightYellow) + } else { + Style::reset().fg(Color::Gray) + }; + + StatefulWidget::render( + // By default the Scrollbar widget inherits the style that was already present + // in the underlying buffer cells. That means if a coloured line (for example a + // background task notification that we render in blue) happens to be underneath + // the scrollbar, the track and thumb adopt that colour and the scrollbar appears + // to “change colour”. Explicitly setting the *track* and *thumb* styles ensures + // we always draw the scrollbar with the same palette regardless of what content + // is behind it. + // + // N.B. Only the *foreground* colour matters here because the scrollbar symbols + // themselves are filled‐in block glyphs that completely overwrite the prior + // character cells. We therefore leave the background at its default value so it + // blends nicely with the surrounding `Block`. + Scrollbar::new(ScrollbarOrientation::VerticalRight) + .begin_symbol(Some("↑")) + .end_symbol(Some("↓")) + .begin_style(Style::reset().fg(Color::DarkGray)) + .end_style(Style::reset().fg(Color::DarkGray)) + // A solid thumb so that we can colour it distinctly from the track. + .thumb_symbol("█") + // Apply the dynamic thumb colour computed above. We still start from + // Style::reset() to clear any inherited modifiers. + .thumb_style(thumb_style) + // Thin vertical line for the track. 
+ .track_symbol(Some("│")) + .track_style(Style::reset().fg(Color::DarkGray)), + inner, + buf, + &mut scroll_state, + ); + } + } +} diff --git a/codex-rs/tui/src/exec_command.rs b/codex-rs/tui/src/exec_command.rs new file mode 100644 index 00000000..35c59b22 --- /dev/null +++ b/codex-rs/tui/src/exec_command.rs @@ -0,0 +1,62 @@ +use std::path::Path; +use std::path::PathBuf; + +use shlex::try_join; + +pub(crate) fn escape_command(command: &[String]) -> String { + try_join(command.iter().map(|s| s.as_str())).unwrap_or_else(|_| command.join(" ")) +} + +pub(crate) fn strip_bash_lc_and_escape(command: &[String]) -> String { + match command { + // exactly three items + [first, second, third] + // first two must be "bash", "-lc" + if first == "bash" && second == "-lc" => + { + third.clone() // return the script verbatim + } + _ => escape_command(command), + } +} + +/// If `path` is absolute and inside $HOME, return the part *after* the home +/// directory; otherwise, return the path as-is. Note if `path` is the homedir, +/// this will return an empty path. +pub(crate) fn relativize_to_home<P>
(path: P) -> Option +where + P: AsRef, +{ + let path = path.as_ref(); + if !path.is_absolute() { + // If the path is not absolute, we can’t do anything with it. + return None; + } + + if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from) { + if let Ok(rel) = path.strip_prefix(&home_dir) { + return Some(rel.to_path_buf()); + } + } + + None +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_escape_command() { + let args = vec!["foo".into(), "bar baz".into(), "weird&stuff".into()]; + let cmdline = escape_command(&args); + assert_eq!(cmdline, "foo 'bar baz' 'weird&stuff'"); + } + + #[test] + fn test_strip_bash_lc_and_escape() { + let args = vec!["bash".into(), "-lc".into(), "echo hello".into()]; + let cmdline = strip_bash_lc_and_escape(&args); + assert_eq!(cmdline, "echo hello"); + } +} diff --git a/codex-rs/tui/src/git_warning_screen.rs b/codex-rs/tui/src/git_warning_screen.rs new file mode 100644 index 00000000..3a7ea211 --- /dev/null +++ b/codex-rs/tui/src/git_warning_screen.rs @@ -0,0 +1,122 @@ +//! Full‑screen warning displayed when Codex is started outside a Git +//! repository (unless the user passed `--allow-no-git-exec`). The screen +//! blocks all input until the user explicitly decides whether to continue or +//! quit. + +use crossterm::event::KeyCode; +use crossterm::event::KeyEvent; +use ratatui::buffer::Buffer; +use ratatui::layout::Alignment; +use ratatui::layout::Constraint; +use ratatui::layout::Direction; +use ratatui::layout::Layout; +use ratatui::layout::Rect; +use ratatui::style::Color; +use ratatui::style::Modifier; +use ratatui::style::Style; +use ratatui::text::Span; +use ratatui::widgets::Block; +use ratatui::widgets::BorderType; +use ratatui::widgets::Borders; +use ratatui::widgets::Paragraph; +use ratatui::widgets::Widget; +use ratatui::widgets::WidgetRef; +use ratatui::widgets::Wrap; + +const NO_GIT_ERROR: &str = "We recommend running codex inside a git repository. \ +This helps ensure that changes can be tracked and easily rolled back if necessary. \ +Do you wish to proceed?"; + +/// Result of handling a key event while the warning screen is active. +pub(crate) enum GitWarningOutcome { + /// User chose to proceed – switch to the main Chat UI. + Continue, + /// User opted to quit the application. + Quit, + /// No actionable key was pressed – stay on the warning screen. + None, +} + +pub(crate) struct GitWarningScreen; + +impl GitWarningScreen { + pub(crate) fn new() -> Self { + Self + } + + /// Handle a key event, returning an outcome indicating whether the user + /// chose to continue, quit, or neither. + pub(crate) fn handle_key_event(&self, key_event: KeyEvent) -> GitWarningOutcome { + match key_event.code { + KeyCode::Char('y') | KeyCode::Char('Y') => GitWarningOutcome::Continue, + KeyCode::Char('n') | KeyCode::Char('q') | KeyCode::Esc => GitWarningOutcome::Quit, + _ => GitWarningOutcome::None, + } + } +} + +impl WidgetRef for &GitWarningScreen { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + const MIN_WIDTH: u16 = 35; + const MIN_HEIGHT: u16 = 15; + // Check if the available area is too small for our popup. + if area.width < MIN_WIDTH || area.height < MIN_HEIGHT { + // Fallback rendering: a simple abbreviated message that fits the available area. 
+ let fallback_message = Paragraph::new(NO_GIT_ERROR) + .wrap(Wrap { trim: true }) + .alignment(Alignment::Center); + fallback_message.render(area, buf); + return; + } + + // Determine the popup (modal) size – aim for 60 % width, 30 % height + // but keep a sensible minimum so the content is always readable. + let popup_width = std::cmp::max(MIN_WIDTH, (area.width as f32 * 0.6) as u16); + let popup_height = std::cmp::max(MIN_HEIGHT, (area.height as f32 * 0.3) as u16); + + // Center the popup in the available area. + let popup_x = area.x + (area.width.saturating_sub(popup_width)) / 2; + let popup_y = area.y + (area.height.saturating_sub(popup_height)) / 2; + let popup_area = Rect::new(popup_x, popup_y, popup_width, popup_height); + + // The modal block that contains everything. + let popup_block = Block::default() + .borders(Borders::ALL) + .border_type(BorderType::Plain) + .title(Span::styled( + "Warning: Not a Git repository", // bold warning title + Style::default().add_modifier(Modifier::BOLD).fg(Color::Red), + )); + + // Obtain the inner area before rendering (render consumes the block). + let inner = popup_block.inner(popup_area); + popup_block.render(popup_area, buf); + + // Split the inner area vertically into two boxes: one for the warning + // explanation, one for the user action instructions. + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Min(3), Constraint::Length(3)]) + .split(inner); + + // ----- First box: detailed warning text -------------------------------- + let text_block = Block::default().borders(Borders::ALL); + let text_inner = text_block.inner(chunks[0]); + text_block.render(chunks[0], buf); + + let warning_paragraph = Paragraph::new(NO_GIT_ERROR) + .wrap(Wrap { trim: true }) + .alignment(Alignment::Left); + warning_paragraph.render(text_inner, buf); + + // ----- Second box: "proceed? y/n" instructions -------------------------- + let action_block = Block::default().borders(Borders::ALL); + let action_inner = action_block.inner(chunks[1]); + action_block.render(chunks[1], buf); + + let action_text = Paragraph::new("press 'y' to continue, 'n' to quit") + .alignment(Alignment::Center) + .style(Style::default().add_modifier(Modifier::BOLD)); + action_text.render(action_inner, buf); + } +} diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs new file mode 100644 index 00000000..d6ebc248 --- /dev/null +++ b/codex-rs/tui/src/history_cell.rs @@ -0,0 +1,271 @@ +use codex_ansi_escape::ansi_escape_line; +use codex_core::protocol::FileChange; +use ratatui::prelude::*; +use ratatui::style::Color; +use ratatui::style::Modifier; +use ratatui::style::Style; +use ratatui::text::Line as RtLine; +use ratatui::text::Span as RtSpan; +use std::collections::HashMap; +use std::path::PathBuf; +use std::time::Duration; +use std::time::Instant; + +use crate::exec_command::escape_command; + +pub(crate) struct CommandOutput { + pub(crate) exit_code: i32, + pub(crate) stdout: String, + pub(crate) stderr: String, + pub(crate) duration: Duration, +} + +pub(crate) enum PatchEventType { + ApprovalRequest, + ApplyBegin { auto_approved: bool }, +} + +/// Represents an event to display in the conversation history. Returns its +/// `Vec>` representation to make it easier to display in a +/// scrollable list. +pub(crate) enum HistoryCell { + /// Message from the user. + UserPrompt { lines: Vec> }, + + /// Message from the agent. + AgentMessage { lines: Vec> }, + + /// An exec tool call that has not finished yet. 
+ ActiveExecCommand { + call_id: String, + /// The shell command, escaped and formatted. + command: String, + start: Instant, + lines: Vec>, + }, + + /// Completed exec tool call. + CompletedExecCommand { lines: Vec> }, + + /// Background event + BackgroundEvent { lines: Vec> }, + + /// Info describing the newly‑initialized session. + SessionInfo { lines: Vec> }, + + /// A pending code patch that is awaiting user approval. Mirrors the + /// behaviour of `ActiveExecCommand` so the user sees *what* patch the + /// model wants to apply before being prompted to approve or deny it. + PendingPatch { + /// Identifier so that a future `PatchApplyEnd` can update the entry + /// with the final status (not yet implemented). + lines: Vec>, + }, +} + +impl HistoryCell { + pub(crate) fn new_user_prompt(message: String) -> Self { + let mut lines: Vec> = Vec::new(); + lines.push(Line::from("user".cyan().bold())); + lines.extend(message.lines().map(|l| Line::from(l.to_string()))); + lines.push(Line::from("")); + + HistoryCell::UserPrompt { lines } + } + + pub(crate) fn new_agent_message(message: String) -> Self { + let mut lines: Vec> = Vec::new(); + lines.push(Line::from("codex".magenta().bold())); + lines.extend(message.lines().map(|l| Line::from(l.to_string()))); + lines.push(Line::from("")); + + HistoryCell::AgentMessage { lines } + } + + pub(crate) fn new_active_exec_command(call_id: String, command: Vec) -> Self { + let command_escaped = escape_command(&command); + let start = Instant::now(); + + let lines: Vec> = vec![ + Line::from(vec!["command".magenta(), " running...".dim()]), + Line::from(format!("$ {command_escaped}")), + Line::from(""), + ]; + + HistoryCell::ActiveExecCommand { + call_id, + command: command_escaped, + start, + lines, + } + } + + pub(crate) fn new_completed_exec_command(command: String, output: CommandOutput) -> Self { + let CommandOutput { + exit_code, + stdout, + stderr, + duration, + } = output; + + let mut lines: Vec> = Vec::new(); + + // Title depends on whether we have output yet. + let title_line = Line::from(vec![ + "command".magenta(), + format!(" (code: {}, duration: {:?})", exit_code, duration).dim(), + ]); + lines.push(title_line); + + const MAX_LINES: usize = 5; + + let src = if exit_code == 0 { stdout } else { stderr }; + + lines.push(Line::from(format!("$ {command}"))); + let mut lines_iter = src.lines(); + for raw in lines_iter.by_ref().take(MAX_LINES) { + lines.push(ansi_escape_line(raw).dim()); + } + let remaining = lines_iter.count(); + if remaining > 0 { + lines.push(Line::from(format!("... 
{} additional lines", remaining)).dim()); + } + lines.push(Line::from("")); + + HistoryCell::CompletedExecCommand { lines } + } + + pub(crate) fn new_background_event(message: String) -> Self { + let mut lines: Vec> = Vec::new(); + lines.push(Line::from("event".dim())); + lines.extend(message.lines().map(|l| Line::from(l.to_string()).dim())); + lines.push(Line::from("")); + HistoryCell::BackgroundEvent { lines } + } + + pub(crate) fn new_session_info( + model: String, + cwd: std::path::PathBuf, + approval_policy: codex_core::protocol::AskForApproval, + ) -> Self { + let mut lines: Vec> = Vec::new(); + + lines.push(Line::from("codex session:".magenta().bold())); + lines.push(Line::from(vec!["↳ model: ".bold(), model.into()])); + lines.push(Line::from(vec![ + "↳ cwd: ".bold(), + cwd.display().to_string().into(), + ])); + lines.push(Line::from(vec![ + "↳ approval: ".bold(), + format!("{:?}", approval_policy).into(), + ])); + lines.push(Line::from("")); + + HistoryCell::SessionInfo { lines } + } + + /// Create a new `PendingPatch` cell that lists the file‑level summary of + /// a proposed patch. The summary lines should already be formatted (e.g. + /// "A path/to/file.rs"). + pub(crate) fn new_patch_event( + event_type: PatchEventType, + changes: HashMap, + ) -> Self { + let title = match event_type { + PatchEventType::ApprovalRequest => "proposed patch", + PatchEventType::ApplyBegin { + auto_approved: true, + } => "applying patch", + PatchEventType::ApplyBegin { + auto_approved: false, + } => { + let lines = vec![Line::from("patch applied".magenta().bold())]; + return Self::PendingPatch { lines }; + } + }; + + let summary_lines = create_diff_summary(changes); + + let mut lines: Vec> = Vec::new(); + + // Header similar to the command formatter so patches are visually + // distinct while still fitting the overall colour scheme. + lines.push(Line::from(title.magenta().bold())); + + for line in summary_lines { + if line.starts_with('+') { + lines.push(line.green().into()); + } else if line.starts_with('-') { + lines.push(line.red().into()); + } else if let Some(space_idx) = line.find(' ') { + let kind_owned = line[..space_idx].to_string(); + let rest_owned = line[space_idx + 1..].to_string(); + + let style_for = |fg: Color| Style::default().fg(fg).add_modifier(Modifier::BOLD); + + let styled_kind = match kind_owned.as_str() { + "A" => RtSpan::styled(kind_owned.clone(), style_for(Color::Green)), + "D" => RtSpan::styled(kind_owned.clone(), style_for(Color::Red)), + "M" => RtSpan::styled(kind_owned.clone(), style_for(Color::Yellow)), + "R" | "C" => RtSpan::styled(kind_owned.clone(), style_for(Color::Cyan)), + _ => RtSpan::raw(kind_owned.clone()), + }; + + let styled_line = + RtLine::from(vec![styled_kind, RtSpan::raw(" "), RtSpan::raw(rest_owned)]); + lines.push(styled_line); + } else { + lines.push(Line::from(line)); + } + } + + lines.push(Line::from("")); + + HistoryCell::PendingPatch { lines } + } + + pub(crate) fn lines(&self) -> &Vec> { + match self { + HistoryCell::UserPrompt { lines, .. } + | HistoryCell::AgentMessage { lines, .. } + | HistoryCell::BackgroundEvent { lines, .. } + | HistoryCell::SessionInfo { lines, .. } + | HistoryCell::ActiveExecCommand { lines, .. } + | HistoryCell::CompletedExecCommand { lines, .. } + | HistoryCell::PendingPatch { lines, .. } => lines, + } + } +} + +fn create_diff_summary(changes: HashMap) -> Vec { + // Build a concise, human‑readable summary list similar to the + // `git status` short format so the user can reason about the + // patch without scrolling. 
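// Hedged illustration of the summary shape produced below (paths invented for
// the example): an added file renders as "A docs/notes.md (+12)", a deletion
// as "D src/old.rs", a rename as "R src/a.rs → src/b.rs", and an in-place
// update as "M src/lib.rs" followed by the lines of its unified diff.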
+ let mut summaries: Vec = Vec::new(); + for (path, change) in &changes { + use codex_core::protocol::FileChange::*; + match change { + Add { content } => { + let added = content.lines().count(); + summaries.push(format!("A {} (+{added})", path.display())); + } + Delete => { + summaries.push(format!("D {}", path.display())); + } + Update { + unified_diff, + move_path, + } => { + if let Some(new_path) = move_path { + summaries.push(format!("R {} → {}", path.display(), new_path.display(),)); + } else { + summaries.push(format!("M {}", path.display(),)); + } + summaries.extend(unified_diff.lines().map(|s| s.to_string())); + } + } + } + + summaries +} diff --git a/codex-rs/tui/src/lib.rs b/codex-rs/tui/src/lib.rs new file mode 100644 index 00000000..598d3eaf --- /dev/null +++ b/codex-rs/tui/src/lib.rs @@ -0,0 +1,165 @@ +// Forbid accidental stdout/stderr writes in the *library* portion of the TUI. +// The standalone `codex-tui` binary prints a short help message before the +// alternate‑screen mode starts; that file opts‑out locally via `allow`. +#![deny(clippy::print_stdout, clippy::print_stderr)] + +use app::App; +use codex_core::util::is_inside_git_repo; +use log_layer::TuiLogLayer; +use std::fs::OpenOptions; +use tracing_appender::non_blocking; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; + +mod app; +mod app_event; +mod bottom_pane; +mod chatwidget; +mod cli; +mod conversation_history_widget; +mod exec_command; +mod git_warning_screen; +mod history_cell; +mod log_layer; +mod status_indicator_widget; +mod tui; +mod user_approval_widget; + +pub use cli::Cli; + +pub fn run_main(cli: Cli) -> std::io::Result<()> { + assert_env_var_set(); + + // Open (or create) your log file, appending to it. + let file = OpenOptions::new() + .create(true) + .append(true) + .open("/tmp/codex-rs.log")?; + + // Wrap file in non‑blocking writer. + let (non_blocking, _guard) = non_blocking(file); + + // use RUST_LOG env var, default to trace for codex crates. + let env_filter = || { + EnvFilter::try_from_default_env() + .unwrap_or_else(|_| EnvFilter::new("codex=trace,codex_tui=trace")) + }; + + // Build layered subscriber: + let file_layer = tracing_subscriber::fmt::layer() + .with_writer(non_blocking) + .with_target(false) + .with_filter(env_filter()); + + // Channel that carries formatted log lines to the UI. + let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel::(); + let tui_layer = TuiLogLayer::new(log_tx.clone(), 120).with_filter(env_filter()); + + let _ = tracing_subscriber::registry() + .with(file_layer) + .with(tui_layer) + .try_init(); + + // Determine whether we need to display the "not a git repo" warning + // modal. The flag is shown when the current working directory is *not* + // inside a Git repository **and** the user did *not* pass the + // `--allow-no-git-exec` flag. + let show_git_warning = !cli.skip_git_repo_check && !is_inside_git_repo(); + + try_run_ratatui_app(cli, show_git_warning, log_rx); + Ok(()) +} + +#[expect( + clippy::print_stderr, + reason = "Resort to stderr in exceptional situations." 
+)] +fn try_run_ratatui_app( + cli: Cli, + show_git_warning: bool, + log_rx: tokio::sync::mpsc::UnboundedReceiver, +) { + if let Err(report) = run_ratatui_app(cli, show_git_warning, log_rx) { + eprintln!("Error: {report:?}"); + } +} + +fn run_ratatui_app( + cli: Cli, + show_git_warning: bool, + mut log_rx: tokio::sync::mpsc::UnboundedReceiver, +) -> color_eyre::Result<()> { + color_eyre::install()?; + + // Forward panic reports through the tracing stack so that they appear in + // the status indicator instead of breaking the alternate screen – the + // normal colour‑eyre hook writes to stderr which would corrupt the UI. + std::panic::set_hook(Box::new(|info| { + tracing::error!("panic: {info}"); + })); + let mut terminal = tui::init()?; + terminal.clear()?; + + let Cli { + prompt, + images, + approval_policy, + sandbox_policy: sandbox, + model, + .. + } = cli; + + let approval_policy = approval_policy.into(); + let sandbox_policy = sandbox.into(); + + let mut app = App::new( + approval_policy, + sandbox_policy, + prompt, + show_git_warning, + images, + model, + ); + + // Bridge log receiver into the AppEvent channel so latest log lines update the UI. + { + let app_event_tx = app.event_sender(); + tokio::spawn(async move { + while let Some(line) = log_rx.recv().await { + let _ = app_event_tx.send(crate::app_event::AppEvent::LatestLog(line)); + } + }); + } + + let app_result = app.run(&mut terminal); + + restore(); + app_result +} + +#[expect( + clippy::print_stderr, + reason = "TUI should not have been displayed yet, so we can write to stderr." +)] +fn assert_env_var_set() { + if std::env::var("OPENAI_API_KEY").is_err() { + eprintln!("Welcome to codex! It looks like you're missing: `OPENAI_API_KEY`"); + eprintln!( + "Create an API key (https://platform.openai.com) and export as an environment variable" + ); + std::process::exit(1); + } +} + +#[expect( + clippy::print_stderr, + reason = "TUI should no longer be displayed, so we can write to stderr." +)] +fn restore() { + if let Err(err) = tui::restore() { + eprintln!( + "failed to restore terminal. Run `reset` or restart your terminal to recover: {}", + err + ); + } +} diff --git a/codex-rs/tui/src/log_layer.rs b/codex-rs/tui/src/log_layer.rs new file mode 100644 index 00000000..bc100cc3 --- /dev/null +++ b/codex-rs/tui/src/log_layer.rs @@ -0,0 +1,94 @@ +//! Custom `tracing_subscriber` layer that forwards every formatted log event to the +//! TUI so the status indicator can display the *latest* log line while a task is +//! running. +//! +//! The layer is intentionally extremely small: we implement `on_event()` only and +//! ignore spans/metadata because we only care about the already‑formatted output +//! that the default `fmt` layer would print. We therefore borrow the same +//! formatter (`tracing_subscriber::fmt::format::FmtSpan`) used by the default +//! fmt layer so the text matches what is written to the log file. + +use std::fmt::Write as _; + +use tokio::sync::mpsc::UnboundedSender; +use tracing::field::Field; +use tracing::field::Visit; +use tracing::Event; +use tracing::Subscriber; +use tracing_subscriber::layer::Context; +use tracing_subscriber::registry::LookupSpan; +use tracing_subscriber::Layer; + +/// Maximum characters forwarded to the TUI. Longer messages are truncated so the +/// single‑line status indicator cannot overflow the viewport. 
+#[allow(dead_code)] +const _DEFAULT_MAX_LEN: usize = 120; + +pub struct TuiLogLayer { + tx: UnboundedSender, + max_len: usize, +} + +impl TuiLogLayer { + pub fn new(tx: UnboundedSender, max_len: usize) -> Self { + Self { + tx, + max_len: max_len.max(8), + } + } +} + +impl Layer for TuiLogLayer +where + S: Subscriber + for<'a> LookupSpan<'a>, +{ + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + // Build a terse line like `[TRACE core::session] message …` by visiting + // fields into a buffer. This avoids pulling in the heavyweight + // formatter machinery. + + struct Visitor<'a> { + buf: &'a mut String, + } + + impl Visit for Visitor<'_> { + fn record_debug(&mut self, _field: &Field, value: &dyn std::fmt::Debug) { + let _ = write!(self.buf, " {:?}", value); + } + } + + let mut buf = String::new(); + let _ = write!( + buf, + "[{} {}]", + event.metadata().level(), + event.metadata().target() + ); + + event.record(&mut Visitor { buf: &mut buf }); + + // `String::truncate` operates on UTF‑8 code‑point boundaries and will + // panic if the provided index is not one. Because we limit the log + // line by its **byte** length we can not guarantee that the index we + // want to cut at happens to be on a boundary. Therefore we fall back + // to a simple, boundary‑safe loop that pops complete characters until + // the string is within the designated size. + + if buf.len() > self.max_len { + // Attempt direct truncate at the byte index. If that is not a + // valid boundary we advance to the next one ( ≤3 bytes away ). + if buf.is_char_boundary(self.max_len) { + buf.truncate(self.max_len); + } else { + let mut idx = self.max_len; + while idx < buf.len() && !buf.is_char_boundary(idx) { + idx += 1; + } + buf.truncate(idx); + } + } + + let sanitized = buf.replace(['\n', '\r'], " "); + let _ = self.tx.send(sanitized); + } +} diff --git a/codex-rs/tui/src/main.rs b/codex-rs/tui/src/main.rs new file mode 100644 index 00000000..56fd5cda --- /dev/null +++ b/codex-rs/tui/src/main.rs @@ -0,0 +1,10 @@ +use clap::Parser; +use codex_tui::run_main; +use codex_tui::Cli; + +#[tokio::main] +async fn main() -> std::io::Result<()> { + let cli = Cli::parse(); + run_main(cli)?; + Ok(()) +} diff --git a/codex-rs/tui/src/status_indicator_widget.rs b/codex-rs/tui/src/status_indicator_widget.rs new file mode 100644 index 00000000..e87beb5e --- /dev/null +++ b/codex-rs/tui/src/status_indicator_widget.rs @@ -0,0 +1,214 @@ +//! A live status indicator that shows the *latest* log line emitted by the +//! application while the agent is processing a long‑running task. +//! +//! It replaces the old spinner animation with real log feedback so users can +//! watch Codex “think” in real‑time. Whenever new text is provided via +//! [`StatusIndicatorWidget::update_text`], the parent widget triggers a +//! redraw so the change is visible immediately. 
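// A minimal usage sketch, assuming only the API declared in this file: the
// parent pane builds the indicator with its AppEvent sender and then pushes
// the latest log line into it as text arrives.
//
//     let mut status = StatusIndicatorWidget::new(app_event_tx.clone(), 3);
//     status.update_text("compiling codex-core...".to_string());
//     // the periodic AppEvent::Redraw emitted by the animation thread then
//     // repaints the widget with the new text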
+ +use std::sync::atomic::AtomicBool; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; +use std::sync::mpsc::Sender; +use std::sync::Arc; +use std::thread; +use std::time::Duration; + +use crossterm::event::KeyEvent; +use ratatui::buffer::Buffer; +use ratatui::layout::Alignment; +use ratatui::layout::Rect; +use ratatui::style::Color; +use ratatui::style::Modifier; +use ratatui::style::Style; +use ratatui::style::Stylize; +use ratatui::text::Line; +use ratatui::text::Span; +use ratatui::widgets::Block; +use ratatui::widgets::BorderType; +use ratatui::widgets::Borders; +use ratatui::widgets::Padding; +use ratatui::widgets::Paragraph; +use ratatui::widgets::WidgetRef; + +use crate::app_event::AppEvent; + +use codex_ansi_escape::ansi_escape_line; + +pub(crate) struct StatusIndicatorWidget { + /// Latest text to display (truncated to the available width at render + /// time). + text: String, + + /// Height in terminal rows – matches the height of the textarea at the + /// moment the task started so the UI does not jump when we toggle between + /// input mode and loading mode. + height: u16, + + frame_idx: std::sync::Arc, + running: std::sync::Arc, + // Keep one sender alive to prevent the channel from closing while the + // animation thread is still running. The field itself is currently not + // accessed anywhere, therefore the leading underscore silences the + // `dead_code` warning without affecting behavior. + _app_event_tx: Sender, +} + +impl StatusIndicatorWidget { + /// Create a new status indicator and start the animation timer. + pub(crate) fn new(app_event_tx: Sender, height: u16) -> Self { + let frame_idx = Arc::new(AtomicUsize::new(0)); + let running = Arc::new(AtomicBool::new(true)); + + // Animation thread. + { + let frame_idx_clone = Arc::clone(&frame_idx); + let running_clone = Arc::clone(&running); + let app_event_tx_clone = app_event_tx.clone(); + thread::spawn(move || { + let mut counter = 0usize; + while running_clone.load(Ordering::Relaxed) { + std::thread::sleep(Duration::from_millis(200)); + counter = counter.wrapping_add(1); + frame_idx_clone.store(counter, Ordering::Relaxed); + if app_event_tx_clone.send(AppEvent::Redraw).is_err() { + break; + } + } + }); + } + + Self { + text: String::from("waiting for logs…"), + height: height.max(3), + frame_idx, + running, + _app_event_tx: app_event_tx, + } + } + + pub(crate) fn handle_key_event( + &mut self, + _key: KeyEvent, + ) -> Result> { + // The indicator does not handle any input – always return `false`. + Ok(false) + } + + /// Preferred height in terminal rows. + pub(crate) fn get_height(&self) -> u16 { + self.height + } + + /// Update the line that is displayed in the widget. + pub(crate) fn update_text(&mut self, text: String) { + self.text = text.replace(['\n', '\r'], " "); + } +} + +impl Drop for StatusIndicatorWidget { + fn drop(&mut self) { + use std::sync::atomic::Ordering; + self.running.store(false, Ordering::Relaxed); + } +} + +impl WidgetRef for StatusIndicatorWidget { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + let widget_style = Style::default(); + let block = Block::default() + .padding(Padding::new(1, 0, 0, 0)) + .borders(Borders::ALL) + .border_type(BorderType::Rounded) + .border_style(widget_style); + // Animated 3‑dot pattern inside brackets. The *active* dot is bold + // white, the others are dim. 
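// Worked sequence assuming DOT_COUNT = 3 as defined below: the period is
// 3 * 2 - 2 = 4, so as frame_idx advances the active index follows
// 0, 1, 2, 1, 0, 1, 2, 1, ... giving the back-and-forth bounce.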
+ const DOT_COUNT: usize = 3; + let idx = self.frame_idx.load(std::sync::atomic::Ordering::Relaxed); + let phase = idx % (DOT_COUNT * 2 - 2); + let active = if phase < DOT_COUNT { + phase + } else { + (DOT_COUNT * 2 - 2) - phase + }; + + let mut header_spans: Vec> = Vec::new(); + + header_spans.push(Span::styled( + "Working ", + Style::default() + .fg(Color::White) + .add_modifier(Modifier::BOLD), + )); + + header_spans.push(Span::styled( + "[", + Style::default() + .fg(Color::White) + .add_modifier(Modifier::BOLD), + )); + + for i in 0..DOT_COUNT { + let style = if i == active { + Style::default() + .fg(Color::White) + .add_modifier(Modifier::BOLD) + } else { + Style::default().dim() + }; + header_spans.push(Span::styled(".", style)); + } + + header_spans.push(Span::styled( + "] ", + Style::default() + .fg(Color::White) + .add_modifier(Modifier::BOLD), + )); + + // Ensure we do not overflow width. + let inner_width = block.inner(area).width as usize; + + // Sanitize and colour‑strip the potentially colourful log text. This + // ensures that **no** raw ANSI escape sequences leak into the + // back‑buffer which would otherwise cause cursor jumps or stray + // artefacts when the terminal is resized. + let line = ansi_escape_line(&self.text); + let mut sanitized_tail: String = line + .spans + .iter() + .map(|s| s.content.as_ref()) + .collect::>() + .join(""); + + // Truncate *after* stripping escape codes so width calculation is + // accurate. See UTF‑8 boundary comments above. + let header_len: usize = header_spans.iter().map(|s| s.content.len()).sum(); + + if header_len + sanitized_tail.len() > inner_width { + let available_bytes = inner_width.saturating_sub(header_len); + + if sanitized_tail.is_char_boundary(available_bytes) { + sanitized_tail.truncate(available_bytes); + } else { + let mut idx = available_bytes; + while idx < sanitized_tail.len() && !sanitized_tail.is_char_boundary(idx) { + idx += 1; + } + sanitized_tail.truncate(idx); + } + } + + let mut spans = header_spans; + + // Re‑apply the DIM modifier so the tail appears visually subdued + // irrespective of the colour information preserved by + // `ansi_escape_line`. 
+ spans.push(Span::styled(sanitized_tail, Style::default().dim())); + + let paragraph = Paragraph::new(Line::from(spans)) + .block(block) + .alignment(Alignment::Left); + paragraph.render_ref(area, buf); + } +} diff --git a/codex-rs/tui/src/tui.rs b/codex-rs/tui/src/tui.rs new file mode 100644 index 00000000..0753dcb0 --- /dev/null +++ b/codex-rs/tui/src/tui.rs @@ -0,0 +1,37 @@ +use std::io::stdout; +use std::io::Stdout; +use std::io::{self}; + +use ratatui::backend::CrosstermBackend; +use ratatui::crossterm::execute; +use ratatui::crossterm::terminal::disable_raw_mode; +use ratatui::crossterm::terminal::enable_raw_mode; +use ratatui::crossterm::terminal::EnterAlternateScreen; +use ratatui::crossterm::terminal::LeaveAlternateScreen; +use ratatui::Terminal; + +/// A type alias for the terminal type used in this application +pub type Tui = Terminal>; + +/// Initialize the terminal +pub fn init() -> io::Result { + execute!(stdout(), EnterAlternateScreen)?; + enable_raw_mode()?; + set_panic_hook(); + Terminal::new(CrosstermBackend::new(stdout())) +} + +fn set_panic_hook() { + let hook = std::panic::take_hook(); + std::panic::set_hook(Box::new(move |panic_info| { + let _ = restore(); // ignore any errors as we are already failing + hook(panic_info); + })); +} + +/// Restore the terminal to its original state +pub fn restore() -> io::Result<()> { + execute!(stdout(), LeaveAlternateScreen)?; + disable_raw_mode()?; + Ok(()) +} diff --git a/codex-rs/tui/src/user_approval_widget.rs b/codex-rs/tui/src/user_approval_widget.rs new file mode 100644 index 00000000..05841aa3 --- /dev/null +++ b/codex-rs/tui/src/user_approval_widget.rs @@ -0,0 +1,395 @@ +//! A modal widget that prompts the user to approve or deny an action +//! requested by the agent. +//! +//! This is a (very) rough port of +//! `src/components/chat/terminal-chat-command-review.tsx` from the TypeScript +//! UI to Rust using [`ratatui`]. The goal is feature‑parity for the keyboard +//! driven workflow – a fully‑fledged visual match is not required. + +use std::path::PathBuf; +use std::sync::mpsc::SendError; +use std::sync::mpsc::Sender; + +use codex_core::protocol::Op; +use codex_core::protocol::ReviewDecision; +use crossterm::event::KeyCode; +use crossterm::event::KeyEvent; +use ratatui::buffer::Buffer; +use ratatui::layout::Rect; +use ratatui::prelude::*; +use ratatui::text::Line; +use ratatui::text::Span; +use ratatui::widgets::Block; +use ratatui::widgets::BorderType; +use ratatui::widgets::Borders; +use ratatui::widgets::List; +use ratatui::widgets::Paragraph; +use ratatui::widgets::Widget; +use ratatui::widgets::WidgetRef; +use tui_input::backend::crossterm::EventHandler; +use tui_input::Input; + +use crate::app_event::AppEvent; +use crate::exec_command::relativize_to_home; +use crate::exec_command::strip_bash_lc_and_escape; + +/// Request coming from the agent that needs user approval. +pub(crate) enum ApprovalRequest { + Exec { + id: String, + command: Vec, + cwd: PathBuf, + reason: Option, + }, + ApplyPatch { + id: String, + reason: Option, + grant_root: Option, + }, +} + +// ────────────────────────────────────────────────────────────────────────── + +/// Options displayed in the *select* mode. +struct SelectOption { + label: &'static str, + decision: Option, + /// `true` when this option switches the widget to *input* mode. 
+ enters_input_mode: bool, +} + +// keep in same order as in the TS implementation +const SELECT_OPTIONS: &[SelectOption] = &[ + SelectOption { + label: "Yes (y)", + decision: Some(ReviewDecision::Approved), + + enters_input_mode: false, + }, + SelectOption { + label: "Yes, always approve this exact command for this session (a)", + decision: Some(ReviewDecision::ApprovedForSession), + + enters_input_mode: false, + }, + SelectOption { + label: "Edit or give feedback (e)", + decision: None, + + enters_input_mode: true, + }, + SelectOption { + label: "No, and keep going (n)", + decision: Some(ReviewDecision::Denied), + + enters_input_mode: false, + }, + SelectOption { + label: "No, and stop for now (esc)", + decision: Some(ReviewDecision::Abort), + + enters_input_mode: false, + }, +]; + +/// Internal mode the widget is in – mirrors the TypeScript component. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum Mode { + Select, + Input, +} + +/// A modal prompting the user to approve or deny the pending request. +pub(crate) struct UserApprovalWidget<'a> { + approval_request: ApprovalRequest, + app_event_tx: Sender, + confirmation_prompt: Paragraph<'a>, + + /// Currently selected index in *select* mode. + selected_option: usize, + + /// State for the optional input widget. + input: Input, + + /// Current mode. + mode: Mode, + + /// Set to `true` once a decision has been sent – the parent view can then + /// remove this widget from its queue. + done: bool, +} + +// Number of lines automatically added by ratatui’s [`Block`] when +// borders are enabled (one at the top, one at the bottom). +const BORDER_LINES: u16 = 2; + +impl UserApprovalWidget<'_> { + pub(crate) fn new(approval_request: ApprovalRequest, app_event_tx: Sender) -> Self { + let input = Input::default(); + let confirmation_prompt = match &approval_request { + ApprovalRequest::Exec { + command, + cwd, + reason, + .. + } => { + let cmd = strip_bash_lc_and_escape(command); + // Maybe try to relativize to the cwd of this process first? + // Will make cwd_str shorter in the common case. + let cwd_str = match relativize_to_home(cwd) { + Some(rel) => format!("~/{}", rel.display()), + None => cwd.display().to_string(), + }; + let mut contents: Vec = vec![ + Line::from("Shell Command".bold()), + Line::from(""), + Line::from(vec![ + format!("{cwd_str}$").dim(), + Span::from(format!(" {cmd}")), + ]), + Line::from(""), + ]; + if let Some(reason) = reason { + contents.push(Line::from(reason.clone().italic())); + contents.push(Line::from("")); + } + contents.extend(vec![Line::from("Allow command?"), Line::from("")]); + Paragraph::new(contents) + } + ApprovalRequest::ApplyPatch { + reason, grant_root, .. 
+ } => { + let mut contents: Vec = + vec![Line::from("Apply patch".bold()), Line::from("")]; + + if let Some(r) = reason { + contents.push(Line::from(r.clone().italic())); + contents.push(Line::from("")); + } + + if let Some(root) = grant_root { + contents.push(Line::from(format!( + "This will grant write access to {} for the remainder of this session.", + root.display() + ))); + contents.push(Line::from("")); + } + + contents.push(Line::from("Allow changes?")); + contents.push(Line::from("")); + + Paragraph::new(contents) + } + }; + + Self { + approval_request, + app_event_tx, + confirmation_prompt, + selected_option: 0, + input, + mode: Mode::Select, + done: false, + } + } + + pub(crate) fn get_height(&self, area: &Rect) -> u16 { + let confirmation_prompt_height = + self.get_confirmation_prompt_height(area.width - BORDER_LINES); + + match self.mode { + Mode::Select => { + let num_option_lines = SELECT_OPTIONS.len() as u16; + confirmation_prompt_height + num_option_lines + BORDER_LINES + } + Mode::Input => { + // 1. "Give the model feedback ..." prompt + // 2. A single‑line input field (we allocate exactly one row; + // the `tui-input` widget will scroll horizontally if the + // text exceeds the width). + const INPUT_PROMPT_LINES: u16 = 1; + const INPUT_FIELD_LINES: u16 = 1; + + confirmation_prompt_height + INPUT_PROMPT_LINES + INPUT_FIELD_LINES + BORDER_LINES + } + } + } + + fn get_confirmation_prompt_height(&self, width: u16) -> u16 { + // Should cache this for last value of width. + self.confirmation_prompt.line_count(width) as u16 + } + + /// Process a `KeyEvent` coming from crossterm. Always consumes the event + /// while the modal is visible. + /// Process a key event originating from crossterm. As the modal fully + /// captures input while visible, we don’t need to report whether the event + /// was consumed—callers can assume it always is. + pub(crate) fn handle_key_event(&mut self, key: KeyEvent) -> Result<(), SendError> { + match self.mode { + Mode::Select => self.handle_select_key(key)?, + Mode::Input => self.handle_input_key(key)?, + } + Ok(()) + } + + fn handle_select_key(&mut self, key_event: KeyEvent) -> Result<(), SendError> { + match key_event.code { + KeyCode::Up => { + if self.selected_option == 0 { + self.selected_option = SELECT_OPTIONS.len() - 1; + } else { + self.selected_option -= 1; + } + return Ok(()); + } + KeyCode::Down => { + self.selected_option = (self.selected_option + 1) % SELECT_OPTIONS.len(); + return Ok(()); + } + KeyCode::Char('y') => { + self.send_decision(ReviewDecision::Approved)?; + return Ok(()); + } + KeyCode::Char('a') => { + self.send_decision(ReviewDecision::ApprovedForSession)?; + return Ok(()); + } + KeyCode::Char('n') => { + self.send_decision(ReviewDecision::Denied)?; + return Ok(()); + } + KeyCode::Char('e') => { + self.mode = Mode::Input; + return Ok(()); + } + KeyCode::Enter => { + let opt = &SELECT_OPTIONS[self.selected_option]; + if opt.enters_input_mode { + self.mode = Mode::Input; + } else if let Some(decision) = opt.decision { + self.send_decision(decision)?; + } + return Ok(()); + } + KeyCode::Esc => { + self.send_decision(ReviewDecision::Abort)?; + return Ok(()); + } + _ => {} + } + Ok(()) + } + + fn handle_input_key(&mut self, key_event: KeyEvent) -> Result<(), SendError> { + // Handle special keys first. 
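// Illustrative flow with a hypothetical entry: typing "use rg instead" and
// pressing Enter resolves the request as ReviewDecision::Denied; the typed
// feedback is captured here but, as noted in send_decision_with_feedback(),
// it is not yet forwarded because the current Op variants cannot carry it.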
+ match key_event.code { + KeyCode::Enter => { + let feedback = self.input.value().to_string(); + self.send_decision_with_feedback(ReviewDecision::Denied, feedback)?; + return Ok(()); + } + KeyCode::Esc => { + // Cancel input – treat as deny without feedback. + self.send_decision(ReviewDecision::Denied)?; + return Ok(()); + } + _ => {} + } + + // Feed into input widget for normal editing. + let ct_event = crossterm::event::Event::Key(key_event); + self.input.handle_event(&ct_event); + Ok(()) + } + + fn send_decision(&mut self, decision: ReviewDecision) -> Result<(), SendError> { + self.send_decision_with_feedback(decision, String::new()) + } + + fn send_decision_with_feedback( + &mut self, + decision: ReviewDecision, + _feedback: String, + ) -> Result<(), SendError> { + let op = match &self.approval_request { + ApprovalRequest::Exec { id, .. } => Op::ExecApproval { + id: id.clone(), + decision, + }, + ApprovalRequest::ApplyPatch { id, .. } => Op::PatchApproval { + id: id.clone(), + decision, + }, + }; + + // Ignore feedback for now – the current `Op` variants do not carry it. + + // Forward the Op to the agent. The caller (ChatWidget) will trigger a + // redraw after it processes the resulting state change, so we avoid + // issuing an extra Redraw here to prevent a transient frame where the + // modal is still visible. + self.app_event_tx.send(AppEvent::CodexOp(op))?; + self.done = true; + Ok(()) + } + + /// Returns `true` once the user has made a decision and the widget no + /// longer needs to be displayed. + pub(crate) fn is_complete(&self) -> bool { + self.done + } + + // ────────────────────────────────────────────────────────────────────── +} + +const PLAIN: Style = Style::new(); +const BLUE_FG: Style = Style::new().fg(Color::Blue); + +impl WidgetRef for &UserApprovalWidget<'_> { + fn render_ref(&self, area: Rect, buf: &mut Buffer) { + // Take the area, wrap it in a block with a border, and divide up the + // remaining area into two chunks: one for the confirmation prompt and + // one for the response. + let outer = Block::default() + .title("Review") + .borders(Borders::ALL) + .border_type(BorderType::Rounded); + let inner = outer.inner(area); + let prompt_height = self.get_confirmation_prompt_height(inner.width); + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Length(prompt_height), Constraint::Min(0)]) + .split(inner); + let prompt_chunk = chunks[0]; + let response_chunk = chunks[1]; + + // Build the inner lines based on the mode. Collect them into a List of + // non-wrapping lines rather than a Paragraph because get_height(Rect) + // depends on this behavior for its calculation. + let lines = match self.mode { + Mode::Select => SELECT_OPTIONS + .iter() + .enumerate() + .map(|(idx, opt)| { + let (prefix, style) = if idx == self.selected_option { + ("▶", BLUE_FG) + } else { + (" ", PLAIN) + }; + Line::styled(format!(" {prefix} {}", opt.label), style) + }) + .collect(), + Mode::Input => { + vec![ + Line::from("Give the model feedback on this command:"), + Line::from(self.input.value()), + ] + } + }; + + outer.render(area, buf); + self.confirmation_prompt.clone().render(prompt_chunk, buf); + Widget::render(List::new(lines), response_chunk, buf); + } +} diff --git a/codex-rs/tui/tests/status_indicator.rs b/codex-rs/tui/tests/status_indicator.rs new file mode 100644 index 00000000..62f190d2 --- /dev/null +++ b/codex-rs/tui/tests/status_indicator.rs @@ -0,0 +1,24 @@ +//! 
Regression test: ensure that `StatusIndicatorWidget` sanitises ANSI escape +//! sequences so that no raw `\x1b` bytes are written into the backing +//! buffer. Rendering logic is tricky to unit‑test end‑to‑end, therefore we +//! verify the *public* contract of `ansi_escape_line()` which the widget now +//! relies on. + +use codex_ansi_escape::ansi_escape_line; + +#[test] +fn ansi_escape_line_strips_escape_sequences() { + let text_in_ansi_red = "\x1b[31mRED\x1b[0m"; + + // The returned line must contain three printable glyphs and **no** raw + // escape bytes. + let line = ansi_escape_line(text_in_ansi_red); + + let combined: String = line + .spans + .iter() + .map(|span| span.content.to_string()) + .collect(); + + assert_eq!(combined, "RED"); +}
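// A further check sketched from the same contract exercised above; it assumes
// that mixed styling within a single input likewise collapses to its plain
// text once the span contents are concatenated.
#[test]
fn ansi_escape_line_preserves_plain_text_for_mixed_styles() {
    let input = "\x1b[1mbold\x1b[0m and \x1b[32mgreen\x1b[0m";

    let line = ansi_escape_line(input);

    // Concatenate the span contents; no raw escape bytes should remain.
    let combined: String = line
        .spans
        .iter()
        .map(|span| span.content.to_string())
        .collect();

    assert_eq!(combined, "bold and green");
}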