From 831e6fa6b5478cc0f77eb520933e4d4dfe83a056 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?= Date: Tue, 11 Nov 2025 16:01:04 +0100 Subject: [PATCH] =?UTF-8?q?Complete=20comprehensive=20Codex=20=E2=86=92=20?= =?UTF-8?q?LLMX=20branding=20update?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed all remaining user-facing "Codex" references across entire codebase: - Updated all UI strings and error messages - Fixed GitHub issue templates and workflows - Updated MCP server tool descriptions and error messages - Fixed all test messages and comments - Updated documentation comments - Changed auth keyring service name to "LLMX Auth" Reduced from 201 occurrences to only code identifiers (struct/type names). Changes span 78 files across Rust, Python, YAML, and JSON. πŸ€– Generated with Claude Code Co-Authored-By: Claude --- .devcontainer/devcontainer.json | 2 +- .github/ISSUE_TEMPLATE/2-bug-report.yml | 8 +- .github/ISSUE_TEMPLATE/3-docs-issue.yml | 2 +- .github/ISSUE_TEMPLATE/4-feature-request.yml | 6 +- .github/workflows/issue-deduplicator.yml | 10 +-- .github/workflows/issue-labeler.yml | 16 ++-- llmx-cli/scripts/build_npm_package.py | 2 +- llmx-cli/scripts/install_native_deps.py | 4 +- llmx-rs/app-server-protocol/src/bin/export.rs | 2 +- .../tests/suite/create_conversation.rs | 2 +- .../tests/suite/fuzzy_file_search.rs | 2 +- llmx-rs/app-server/tests/suite/interrupt.rs | 2 +- .../suite/llmx_message_processor_flow.rs | 8 +- .../app-server/tests/suite/send_message.rs | 8 +- .../app-server/tests/suite/v2/turn_start.rs | 4 +- llmx-rs/arg0/src/lib.rs | 2 +- llmx-rs/chatgpt/tests/task_turn_fixture.json | 2 +- llmx-rs/cli/src/main.rs | 4 +- llmx-rs/common/src/approval_presets.rs | 6 +- llmx-rs/common/src/config_override.rs | 2 +- llmx-rs/core/src/auth/storage.rs | 2 +- llmx-rs/core/src/config/edit.rs | 2 +- llmx-rs/core/src/default_client.rs | 8 +- llmx-rs/core/src/error.rs | 8 +- 
llmx-rs/core/src/event_mapping.rs | 4 +- llmx-rs/core/src/llmx.rs | 4 +- llmx-rs/core/src/mcp_connection_manager.rs | 6 +- llmx-rs/core/src/model_provider_info.rs | 6 +- llmx-rs/core/src/rollout/policy.rs | 2 +- llmx-rs/core/src/rollout/recorder.rs | 2 +- llmx-rs/core/src/spawn.rs | 6 +- llmx-rs/core/src/tools/spec.rs | 2 +- .../core/tests/chat_completions_payload.rs | 16 ++-- llmx-rs/core/tests/chat_completions_sse.rs | 16 ++-- llmx-rs/core/tests/common/lib.rs | 4 +- llmx-rs/core/tests/suite/cli_stream.rs | 2 +- llmx-rs/core/tests/suite/client.rs | 4 +- llmx-rs/core/tests/suite/compact.rs | 4 +- .../core/tests/suite/compact_resume_fork.rs | 2 +- llmx-rs/core/tests/suite/fork_conversation.rs | 2 +- llmx-rs/core/tests/suite/quota_exceeded.rs | 2 +- llmx-rs/core/tests/suite/review.rs | 2 +- llmx-rs/core/tests/suite/undo.rs | 4 +- llmx-rs/core/tests/suite/view_image.rs | 2 +- .../src/event_processor_with_human_output.rs | 2 +- llmx-rs/exec/src/lib.rs | 8 +- llmx-rs/feedback/src/lib.rs | 2 +- llmx-rs/login/src/device_code_auth.rs | 2 +- llmx-rs/mcp-server/src/exec_approval.rs | 2 +- llmx-rs/mcp-server/src/llmx_tool_config.rs | 34 ++++---- llmx-rs/mcp-server/src/llmx_tool_runner.rs | 10 +-- llmx-rs/mcp-server/src/message_processor.rs | 20 ++--- llmx-rs/mcp-server/src/patch_approval.rs | 2 +- .../mcp-server/tests/common/mcp_process.rs | 2 +- llmx-rs/mcp-server/tests/suite/llmx_tool.rs | 8 +- llmx-rs/mcp-types/generate_mcp_types.py | 4 +- llmx-rs/mcp-types/src/lib.rs | 2 +- llmx-rs/process-hardening/src/lib.rs | 2 +- llmx-rs/protocol/src/models.rs | 2 +- llmx-rs/protocol/src/protocol.rs | 4 +- .../rmcp-client/src/bin/test_stdio_server.rs | 2 +- .../src/bin/test_streamable_http_server.rs | 2 +- llmx-rs/rmcp-client/src/oauth.rs | 2 +- .../rmcp-client/src/perform_oauth_login.rs | 2 +- llmx-rs/rmcp-client/tests/resources.rs | 4 +- .../tui/src/bottom_pane/approval_overlay.rs | 4 +- llmx-rs/tui/src/bottom_pane/chat_composer.rs | 84 +++++++++---------- 
llmx-rs/tui/src/bottom_pane/feedback_view.rs | 2 +- .../src/bottom_pane/list_selection_view.rs | 8 +- llmx-rs/tui/src/bottom_pane/mod.rs | 14 ++-- llmx-rs/tui/src/chatwidget.rs | 12 +-- llmx-rs/tui/src/chatwidget/tests.rs | 8 +- llmx-rs/tui/src/history_cell.rs | 8 +- llmx-rs/tui/src/lib.rs | 2 +- llmx-rs/tui/src/slash_command.rs | 6 +- llmx-rs/tui/src/status/card.rs | 2 +- .../windows-sandbox-rs/sandbox_smoketests.py | 8 +- scripts/stage_npm_packages.py | 2 +- 78 files changed, 242 insertions(+), 242 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 1bed79c3..1dc5e2d5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,5 +1,5 @@ { - "name": "Codex", + "name": "LLMX", "build": { "dockerfile": "Dockerfile", "context": "..", diff --git a/.github/ISSUE_TEMPLATE/2-bug-report.yml b/.github/ISSUE_TEMPLATE/2-bug-report.yml index 109f026c..43034193 100644 --- a/.github/ISSUE_TEMPLATE/2-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/2-bug-report.yml @@ -7,18 +7,18 @@ body: - type: markdown attributes: value: | - Thank you for submitting a bug report! It helps make Codex better for everyone. + Thank you for submitting a bug report! It helps make LLMX better for everyone. - If you need help or support using Codex, and are not reporting a bug, please post on [codex/discussions](https://github.com/openai/codex/discussions), where you can ask questions or engage with others on ideas for how to improve codex. + If you need help or support using LLMX, and are not reporting a bug, please post on [llmx/discussions](https://github.com/valknar/llmx/discussions), where you can ask questions or engage with others on ideas for how to improve llmx. - Make sure you are running the [latest](https://npmjs.com/package/@openai/codex) version of Codex CLI. The bug you are experiencing may already have been fixed. + Make sure you are running the [latest](https://npmjs.com/package/@llmx/llmx) version of LLMX CLI. 
The bug you are experiencing may already have been fixed. Please try to include as much information as possible. - type: input id: version attributes: - label: What version of Codex is running? + label: What version of LLMX is running? description: Copy the output of `codex --version` validations: required: true diff --git a/.github/ISSUE_TEMPLATE/3-docs-issue.yml b/.github/ISSUE_TEMPLATE/3-docs-issue.yml index 456602e6..6810153a 100644 --- a/.github/ISSUE_TEMPLATE/3-docs-issue.yml +++ b/.github/ISSUE_TEMPLATE/3-docs-issue.yml @@ -5,7 +5,7 @@ body: - type: markdown attributes: value: | - Thank you for submitting a documentation request. It helps make Codex better. + Thank you for submitting a documentation request. It helps make LLMX better. - type: dropdown attributes: label: What is the type of issue? diff --git a/.github/ISSUE_TEMPLATE/4-feature-request.yml b/.github/ISSUE_TEMPLATE/4-feature-request.yml index fea86edd..ba9ee053 100644 --- a/.github/ISSUE_TEMPLATE/4-feature-request.yml +++ b/.github/ISSUE_TEMPLATE/4-feature-request.yml @@ -1,16 +1,16 @@ name: 🎁 Feature Request -description: Propose a new feature for Codex +description: Propose a new feature for LLMX labels: - enhancement body: - type: markdown attributes: value: | - Is Codex missing a feature that you'd like to see? Feel free to propose it here. + Is LLMX missing a feature that you'd like to see? Feel free to propose it here. Before you submit a feature: 1. Search existing issues for similar features. If you find one, πŸ‘ it rather than opening a new one. - 2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex#contributing) for more details. + 2. The LLMX team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. 
See [Contributing](https://github.com/valknar/llmx#contributing) for more details. - type: textarea id: feature diff --git a/.github/workflows/issue-deduplicator.yml b/.github/workflows/issue-deduplicator.yml index c36857ca..c8e89e8a 100644 --- a/.github/workflows/issue-deduplicator.yml +++ b/.github/workflows/issue-deduplicator.yml @@ -18,7 +18,7 @@ jobs: steps: - uses: actions/checkout@v5 - - name: Prepare Codex inputs + - name: Prepare LLMX inputs env: GH_TOKEN: ${{ github.token }} run: | @@ -42,7 +42,7 @@ jobs: > "$CURRENT_ISSUE_FILE" - id: codex - uses: openai/codex-action@main + uses: valknar/llmx-action@main with: openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }} allow-users: "*" @@ -98,7 +98,7 @@ jobs: try { parsed = JSON.parse(raw); } catch (error) { - core.info(`Codex output was not valid JSON. Raw output: ${raw}`); + core.info(`LLMX output was not valid JSON. Raw output: ${raw}`); core.info(`Parse error: ${error.message}`); return; } @@ -112,7 +112,7 @@ jobs: const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber); if (filteredIssues.length === 0) { - core.info('Codex reported no potential duplicates.'); + core.info('LLMX reported no potential duplicates.'); return; } @@ -121,7 +121,7 @@ jobs: '', ...filteredIssues.map((value) => `- #${String(value)}`), '', - '*Powered by [Codex Action](https://github.com/openai/codex-action)*']; + '*Powered by [LLMX Action](https://github.com/valknar/llmx-action)*']; await github.rest.issues.createComment({ owner: context.repo.owner, diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml index 39f9d47f..23c881f4 100644 --- a/.github/workflows/issue-labeler.yml +++ b/.github/workflows/issue-labeler.yml @@ -30,16 +30,16 @@ jobs: Follow these rules: - Add one (and only one) of the following three labels to distinguish the type of issue. Default to "bug" if unsure. - 1. bug β€” Reproducible defects in Codex products (CLI, VS Code extension, web, auth). + 1. 
bug β€” Reproducible defects in LLMX products (CLI, VS Code extension, web, auth). 2. enhancement β€” Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks. 3. documentation β€” Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests). - If applicable, add one of the following labels to specify which sub-product or product surface the issue relates to. - 1. CLI β€” the Codex command line interface. + 1. CLI β€” the LLMX command line interface. 2. extension β€” VS Code (or other IDE) extension-specific issues. 3. codex-web β€” Issues targeting the Codex web UI/Cloud experience. - 4. github-action β€” Issues with the Codex GitHub action. - 5. iOS β€” Issues with the Codex iOS app. + 4. github-action β€” Issues with the LLMX GitHub action. + 5. iOS β€” Issues with the LLMX iOS app. - Additionally add zero or more of the following labels that are relevant to the issue content. Prefer a small set of precise labels over many broad ones. 1. windows-os β€” Bugs or friction specific to Windows environments (always when PowerShell is mentioned, path handling, copy/paste, OS-specific auth or tooling failures). @@ -84,7 +84,7 @@ jobs: } apply-labels: - name: Apply labels from Codex output + name: Apply labels from LLMX output needs: gather-labels if: ${{ needs.gather-labels.result != 'skipped' }} runs-on: ubuntu-latest @@ -101,18 +101,18 @@ jobs: run: | json=${CODEX_OUTPUT//$'\r'/} if [ -z "$json" ]; then - echo "Codex produced no output. Skipping label application." + echo "LLMX produced no output. Skipping label application." exit 0 fi if ! printf '%s' "$json" | jq -e 'type == "object" and (.labels | type == "array")' >/dev/null 2>&1; then - echo "Codex output did not include a labels array. Raw output: $json" + echo "LLMX output did not include a labels array. 
Raw output: $json" exit 0 fi labels=$(printf '%s' "$json" | jq -r '.labels[] | tostring') if [ -z "$labels" ]; then - echo "Codex returned an empty array. Nothing to do." + echo "LLMX returned an empty array. Nothing to do." exit 0 fi diff --git a/llmx-cli/scripts/build_npm_package.py b/llmx-cli/scripts/build_npm_package.py index 158f992f..3d900864 100755 --- a/llmx-cli/scripts/build_npm_package.py +++ b/llmx-cli/scripts/build_npm_package.py @@ -28,7 +28,7 @@ COMPONENT_DEST_DIR: dict[str, str] = { def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.") + parser = argparse.ArgumentParser(description="Build or stage the LLMX CLI npm package.") parser.add_argument( "--package", choices=("llmx", "llmx-responses-api-proxy", "llmx-sdk"), diff --git a/llmx-cli/scripts/install_native_deps.py b/llmx-cli/scripts/install_native_deps.py index 67b4c743..071e2a40 100755 --- a/llmx-cli/scripts/install_native_deps.py +++ b/llmx-cli/scripts/install_native_deps.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Install Codex native binaries (Rust CLI plus ripgrep helpers).""" +"""Install LLMX native binaries (Rust CLI plus ripgrep helpers).""" import argparse import json @@ -64,7 +64,7 @@ DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS] def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Install native Codex binaries.") + parser = argparse.ArgumentParser(description="Install native LLMX binaries.") parser.add_argument( "--workflow-url", help=( diff --git a/llmx-rs/app-server-protocol/src/bin/export.rs b/llmx-rs/app-server-protocol/src/bin/export.rs index 6b305208..7f8518c8 100644 --- a/llmx-rs/app-server-protocol/src/bin/export.rs +++ b/llmx-rs/app-server-protocol/src/bin/export.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; #[derive(Parser, Debug)] #[command( - about = "Generate TypeScript bindings and JSON Schemas for the Codex app-server protocol" + about 
= "Generate TypeScript bindings and JSON Schemas for the LLMX app-server protocol" )] struct Args { /// Output directory where generated files will be written diff --git a/llmx-rs/app-server/tests/suite/create_conversation.rs b/llmx-rs/app-server/tests/suite/create_conversation.rs index 7a632958..c068f4ab 100644 --- a/llmx-rs/app-server/tests/suite/create_conversation.rs +++ b/llmx-rs/app-server/tests/suite/create_conversation.rs @@ -26,7 +26,7 @@ async fn test_conversation_create_and_send_message_ok() -> Result<()> { let responses = vec![create_final_assistant_message_sse_response("Done")?]; let server = create_mock_chat_completions_server(responses).await; - // Temporary Codex home with config pointing at the mock server. + // Temporary LLMX home with config pointing at the mock server. let codex_home = TempDir::new()?; create_config_toml(codex_home.path(), &server.uri())?; diff --git a/llmx-rs/app-server/tests/suite/fuzzy_file_search.rs b/llmx-rs/app-server/tests/suite/fuzzy_file_search.rs index 19e843f1..da71def4 100644 --- a/llmx-rs/app-server/tests/suite/fuzzy_file_search.rs +++ b/llmx-rs/app-server/tests/suite/fuzzy_file_search.rs @@ -12,7 +12,7 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> { - // Prepare a temporary Codex home and a separate root with test files. + // Prepare a temporary LLMX home and a separate root with test files. 
let codex_home = TempDir::new()?; let root = TempDir::new()?; diff --git a/llmx-rs/app-server/tests/suite/interrupt.rs b/llmx-rs/app-server/tests/suite/interrupt.rs index 3a37595c..f66bcedb 100644 --- a/llmx-rs/app-server/tests/suite/interrupt.rs +++ b/llmx-rs/app-server/tests/suite/interrupt.rs @@ -49,7 +49,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> { let shell_command = vec!["sleep".to_string(), "10".to_string()]; let tmp = TempDir::new()?; - // Temporary Codex home with config pointing at the mock server. + // Temporary LLMX home with config pointing at the mock server. let codex_home = tmp.path().join("codex_home"); std::fs::create_dir(&codex_home)?; let working_directory = tmp.path().join("workdir"); diff --git a/llmx-rs/app-server/tests/suite/llmx_message_processor_flow.rs b/llmx-rs/app-server/tests/suite/llmx_message_processor_flow.rs index ea72e4d5..24bb5243 100644 --- a/llmx-rs/app-server/tests/suite/llmx_message_processor_flow.rs +++ b/llmx-rs/app-server/tests/suite/llmx_message_processor_flow.rs @@ -41,13 +41,13 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs async fn test_codex_jsonrpc_conversation_flow() -> Result<()> { if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return Ok(()); } let tmp = TempDir::new()?; - // Temporary Codex home with config pointing at the mock server. + // Temporary LLMX home with config pointing at the mock server. 
let codex_home = tmp.path().join("codex_home"); std::fs::create_dir(&codex_home)?; let working_directory = tmp.path().join("workdir"); @@ -162,7 +162,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> { async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> { if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return Ok(()); } @@ -337,7 +337,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> { async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<()> { if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return Ok(()); } diff --git a/llmx-rs/app-server/tests/suite/send_message.rs b/llmx-rs/app-server/tests/suite/send_message.rs index 61a765f6..49c3ace6 100644 --- a/llmx-rs/app-server/tests/suite/send_message.rs +++ b/llmx-rs/app-server/tests/suite/send_message.rs @@ -26,15 +26,15 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs #[tokio::test] async fn test_send_message_success() -> Result<()> { - // Spin up a mock completions server that immediately ends the Codex turn. - // Two Codex turns hit the mock model (session start + send-user-message). Provide two SSE responses. + // Spin up a mock completions server that immediately ends the LLMX turn. + // Two LLMX turns hit the mock model (session start + send-user-message). Provide two SSE responses. 
let responses = vec![ create_final_assistant_message_sse_response("Done")?, create_final_assistant_message_sse_response("Done")?, ]; let server = create_mock_chat_completions_server(responses).await; - // Create a temporary Codex home with config pointing at the mock server. + // Create a temporary LLMX home with config pointing at the mock server. let codex_home = TempDir::new()?; create_config_toml(codex_home.path(), &server.uri())?; @@ -215,7 +215,7 @@ async fn test_send_message_raw_notifications_opt_in() -> Result<()> { #[tokio::test] async fn test_send_message_session_not_found() -> Result<()> { - // Start MCP without creating a Codex session + // Start MCP without creating an LLMX session let codex_home = TempDir::new()?; let mut mcp = McpProcess::new(codex_home.path()).await?; timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??; diff --git a/llmx-rs/app-server/tests/suite/v2/turn_start.rs b/llmx-rs/app-server/tests/suite/v2/turn_start.rs index cb1f7eeb..44439a44 100644 --- a/llmx-rs/app-server/tests/suite/v2/turn_start.rs +++ b/llmx-rs/app-server/tests/suite/v2/turn_start.rs @@ -31,7 +31,7 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs #[tokio::test] async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> { // Provide a mock server and config so model wiring is valid. - // Three Codex turns hit the mock model (session start + two turn/start calls). + // Three LLMX turns hit the mock model (session start + two turn/start calls). let responses = vec![ create_final_assistant_message_sse_response("Done")?, create_final_assistant_message_sse_response("Done")?, @@ -131,7 +131,7 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<( #[tokio::test] async fn turn_start_accepts_local_image_input() -> Result<()> { - // Two Codex turns hit the mock model (session start + turn/start). + // Two LLMX turns hit the mock model (session start + turn/start). 
let responses = vec![ create_final_assistant_message_sse_response("Done")?, create_final_assistant_message_sse_response("Done")?, diff --git a/llmx-rs/arg0/src/lib.rs b/llmx-rs/arg0/src/lib.rs index a5dbc269..97537b33 100644 --- a/llmx-rs/arg0/src/lib.rs +++ b/llmx-rs/arg0/src/lib.rs @@ -54,7 +54,7 @@ pub fn arg0_dispatch() -> Option { match prepend_path_entry_for_codex_aliases() { Ok(path_entry) => Some(path_entry), Err(err) => { - // It is possible that Codex will proceed successfully even if + // It is possible that LLMX will proceed successfully even if // updating the PATH fails, so warn the user and move on. eprintln!("WARNING: proceeding, even though we could not update PATH: {err}"); None diff --git a/llmx-rs/chatgpt/tests/task_turn_fixture.json b/llmx-rs/chatgpt/tests/task_turn_fixture.json index 3750f550..d5ef4dd3 100644 --- a/llmx-rs/chatgpt/tests/task_turn_fixture.json +++ b/llmx-rs/chatgpt/tests/task_turn_fixture.json @@ -56,7 +56,7 @@ }, { "content_type": "text", - "text": "\n\nCodex couldn't run certain commands due to environment limitations. Consider configuring a setup script or internet access in your Codex environment to install dependencies." + "text": "\n\nLLMX couldn't run certain commands due to environment limitations. Consider configuring a setup script or internet access in your LLMX environment to install dependencies." 
} ] } diff --git a/llmx-rs/cli/src/main.rs b/llmx-rs/cli/src/main.rs index 49f16be4..fd7ec2b3 100644 --- a/llmx-rs/cli/src/main.rs +++ b/llmx-rs/cli/src/main.rs @@ -292,7 +292,7 @@ fn handle_app_exit(exit_info: AppExitInfo) -> anyhow::Result<()> { fn run_update_action(action: UpdateAction) -> anyhow::Result<()> { println!(); let cmd_str = action.command_str(); - println!("Updating Codex via `{cmd_str}`..."); + println!("Updating LLMX via `{cmd_str}`..."); let status = { #[cfg(windows)] @@ -319,7 +319,7 @@ fn run_update_action(action: UpdateAction) -> anyhow::Result<()> { anyhow::bail!("`{cmd_str}` failed with status {status}"); } println!(); - println!("πŸŽ‰ Update ran successfully! Please restart Codex."); + println!("πŸŽ‰ Update ran successfully! Please restart LLMX."); Ok(()) } diff --git a/llmx-rs/common/src/approval_presets.rs b/llmx-rs/common/src/approval_presets.rs index a4c2f1d8..933d5480 100644 --- a/llmx-rs/common/src/approval_presets.rs +++ b/llmx-rs/common/src/approval_presets.rs @@ -24,21 +24,21 @@ pub fn builtin_approval_presets() -> Vec { ApprovalPreset { id: "read-only", label: "Read Only", - description: "Codex can read files and answer questions. Codex requires approval to make edits, run commands, or access network.", + description: "LLMX can read files and answer questions. LLMX requires approval to make edits, run commands, or access network.", approval: AskForApproval::OnRequest, sandbox: SandboxPolicy::ReadOnly, }, ApprovalPreset { id: "auto", label: "Auto", - description: "Codex can read files, make edits, and run commands in the workspace. Codex requires approval to work outside the workspace or access network.", + description: "LLMX can read files, make edits, and run commands in the workspace. 
LLMX requires approval to work outside the workspace or access network.", approval: AskForApproval::OnRequest, sandbox: SandboxPolicy::new_workspace_write_policy(), }, ApprovalPreset { id: "full-access", label: "Full Access", - description: "Codex can read files, make edits, and run commands with network access, without approval. Exercise caution.", + description: "LLMX can read files, make edits, and run commands with network access, without approval. Exercise caution.", approval: AskForApproval::Never, sandbox: SandboxPolicy::DangerFullAccess, }, diff --git a/llmx-rs/common/src/config_override.rs b/llmx-rs/common/src/config_override.rs index 6555555e..a17328fe 100644 --- a/llmx-rs/common/src/config_override.rs +++ b/llmx-rs/common/src/config_override.rs @@ -1,4 +1,4 @@ -//! Support for `-c key=value` overrides shared across Codex CLI tools. +//! Support for `-c key=value` overrides shared across LLMX CLI tools. //! //! This module provides a [`CliConfigOverrides`] struct that can be embedded //! into a `clap`-derived CLI struct using `#[clap(flatten)]`. 
Each occurrence diff --git a/llmx-rs/core/src/auth/storage.rs b/llmx-rs/core/src/auth/storage.rs index b3286ee3..97e36e6a 100644 --- a/llmx-rs/core/src/auth/storage.rs +++ b/llmx-rs/core/src/auth/storage.rs @@ -122,7 +122,7 @@ impl AuthStorageBackend for FileAuthStorage { } } -const KEYRING_SERVICE: &str = "Codex Auth"; +const KEYRING_SERVICE: &str = "LLMX Auth"; // turns codex_home path into a stable, short key string fn compute_store_key(codex_home: &Path) -> std::io::Result { diff --git a/llmx-rs/core/src/config/edit.rs b/llmx-rs/core/src/config/edit.rs index 35806782..0e1d2d32 100644 --- a/llmx-rs/core/src/config/edit.rs +++ b/llmx-rs/core/src/config/edit.rs @@ -421,7 +421,7 @@ pub fn apply_blocking( std::fs::create_dir_all(codex_home).with_context(|| { format!( - "failed to create Codex home directory at {}", + "failed to create LLMX home directory at {}", codex_home.display() ) })?; diff --git a/llmx-rs/core/src/default_client.rs b/llmx-rs/core/src/default_client.rs index 68a9ee49..ef879cf3 100644 --- a/llmx-rs/core/src/default_client.rs +++ b/llmx-rs/core/src/default_client.rs @@ -240,17 +240,17 @@ fn sanitize_user_agent(candidate: String, fallback: &str) -> String { .collect(); if !sanitized.is_empty() && HeaderValue::from_str(sanitized.as_str()).is_ok() { tracing::warn!( - "Sanitized Codex user agent because provided suffix contained invalid header characters" + "Sanitized LLMX user agent because provided suffix contained invalid header characters" ); sanitized } else if HeaderValue::from_str(fallback).is_ok() { tracing::warn!( - "Falling back to base Codex user agent because provided suffix could not be sanitized" + "Falling back to base LLMX user agent because provided suffix could not be sanitized" ); fallback.to_string() } else { tracing::warn!( - "Falling back to default Codex originator because base user agent string is invalid" + "Falling back to default LLMX originator because base user agent string is invalid" ); originator().value.clone() } @@ 
-331,7 +331,7 @@ mod tests { .expect("originator header missing"); assert_eq!(originator_header.to_str().unwrap(), "codex_cli_rs"); - // User-Agent matches the computed Codex UA for that originator + // User-Agent matches the computed LLMX UA for that originator let expected_ua = get_codex_user_agent(); let ua_header = headers .get("user-agent") diff --git a/llmx-rs/core/src/error.rs b/llmx-rs/core/src/error.rs index ee533c46..7d6aa46c 100644 --- a/llmx-rs/core/src/error.rs +++ b/llmx-rs/core/src/error.rs @@ -72,7 +72,7 @@ pub enum CodexErr { Stream(String, Option), #[error( - "Codex ran out of room in the model's context window. Start a new conversation or clear earlier history before retrying." + "LLMX ran out of room in the model's context window. Start a new conversation or clear earlier history before retrying." )] ContextWindowExceeded, @@ -113,7 +113,7 @@ pub enum CodexErr { QuotaExceeded, #[error( - "To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing." + "To use LLMX with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing." )] UsageNotIncluded, @@ -321,7 +321,7 @@ impl std::fmt::Display for UsageLimitReachedError { ) } Some(PlanType::Known(KnownPlan::Free)) => { - "You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing)." + "You've hit your usage limit. Upgrade to Plus to continue using LLMX (https://openai.com/chatgpt/pricing)." .to_string() } Some(PlanType::Known(KnownPlan::Pro)) => format!( @@ -596,7 +596,7 @@ mod tests { }; assert_eq!( err.to_string(), - "You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing)." + "You've hit your usage limit. Upgrade to Plus to continue using LLMX (https://openai.com/chatgpt/pricing)." 
); } diff --git a/llmx-rs/core/src/event_mapping.rs b/llmx-rs/core/src/event_mapping.rs index f092dc0d..eabe0608 100644 --- a/llmx-rs/core/src/event_mapping.rs +++ b/llmx-rs/core/src/event_mapping.rs @@ -219,7 +219,7 @@ mod tests { id: Some("msg-1".to_string()), role: "assistant".to_string(), content: vec![ContentItem::OutputText { - text: "Hello from Codex".to_string(), + text: "Hello from LLMX".to_string(), }], }; @@ -230,7 +230,7 @@ mod tests { let Some(AgentMessageContent::Text { text }) = message.content.first() else { panic!("expected agent message text content"); }; - assert_eq!(text, "Hello from Codex"); + assert_eq!(text, "Hello from LLMX"); } other => panic!("expected TurnItem::AgentMessage, got {other:?}"), } diff --git a/llmx-rs/core/src/llmx.rs b/llmx-rs/core/src/llmx.rs index 072c3c21..1cb77e77 100644 --- a/llmx-rs/core/src/llmx.rs +++ b/llmx-rs/core/src/llmx.rs @@ -183,7 +183,7 @@ impl Codex { session_source, }; - // Generate a unique ID for the lifetime of this Codex session. + // Generate a unique ID for the lifetime of this LLMX session. let session_source_clone = session_configuration.session_source.clone(); let session = Session::new( session_configuration, @@ -1583,7 +1583,7 @@ mod handlers { pub async fn shutdown(sess: &Arc, sub_id: String) -> bool { sess.abort_all_tasks(TurnAbortReason::Interrupted).await; - info!("Shutting down Codex instance"); + info!("Shutting down LLMX instance"); // Gracefully flush and shutdown rollout recorder on session end so tests // that inspect the rollout file do not race with the background writer. 
diff --git a/llmx-rs/core/src/mcp_connection_manager.rs b/llmx-rs/core/src/mcp_connection_manager.rs index 1f905a61..3bad6530 100644 --- a/llmx-rs/core/src/mcp_connection_manager.rs +++ b/llmx-rs/core/src/mcp_connection_manager.rs @@ -182,9 +182,9 @@ impl McpConnectionManager { client_info: Implementation { name: "codex-mcp-client".to_owned(), version: env!("CARGO_PKG_VERSION").to_owned(), - title: Some("Codex".into()), - // This field is used by Codex when it is an MCP - // server: it should not be used when Codex is + title: Some("LLMX".into()), + // This field is used by LLMX when it is an MCP + // server: it should not be used when LLMX is // an MCP client. user_agent: None, }, diff --git a/llmx-rs/core/src/model_provider_info.rs b/llmx-rs/core/src/model_provider_info.rs index 993091e9..19158db3 100644 --- a/llmx-rs/core/src/model_provider_info.rs +++ b/llmx-rs/core/src/model_provider_info.rs @@ -1,7 +1,7 @@ -//! Registry of model providers supported by Codex. +//! Registry of model providers supported by LLMX. //! //! Providers can be defined in two places: -//! 1. Built-in defaults compiled into the binary so Codex works out-of-the-box. +//! 1. Built-in defaults compiled into the binary so LLMX works out-of-the-box. //! 2. User-defined entries inside `~/.llmx/config.toml` under the `model_providers` //! key. These override or extend the defaults at runtime. @@ -299,7 +299,7 @@ pub fn built_in_model_providers() -> HashMap { name: "OpenAI".into(), // Allow users to override the default OpenAI endpoint by // exporting `OPENAI_BASE_URL`. This is useful when pointing - // Codex at a proxy, mock server, or Azure-style deployment + // LLMX at a proxy, mock server, or Azure-style deployment // without requiring a full TOML override for the built-in // OpenAI provider. 
base_url: std::env::var("OPENAI_BASE_URL") diff --git a/llmx-rs/core/src/rollout/policy.rs b/llmx-rs/core/src/rollout/policy.rs index 72b19481..540c7752 100644 --- a/llmx-rs/core/src/rollout/policy.rs +++ b/llmx-rs/core/src/rollout/policy.rs @@ -8,7 +8,7 @@ pub(crate) fn is_persisted_response_item(item: &RolloutItem) -> bool { match item { RolloutItem::ResponseItem(item) => should_persist_response_item(item), RolloutItem::EventMsg(ev) => should_persist_event_msg(ev), - // Persist Codex executive markers so we can analyze flows (e.g., compaction, API turns). + // Persist LLMX executive markers so we can analyze flows (e.g., compaction, API turns). RolloutItem::Compacted(_) | RolloutItem::TurnContext(_) | RolloutItem::SessionMeta(_) => { true } diff --git a/llmx-rs/core/src/rollout/recorder.rs b/llmx-rs/core/src/rollout/recorder.rs index bd2098e6..d47eb5f7 100644 --- a/llmx-rs/core/src/rollout/recorder.rs +++ b/llmx-rs/core/src/rollout/recorder.rs @@ -1,4 +1,4 @@ -//! Persist Codex session rollouts (.jsonl) so sessions can be replayed or inspected later. +//! Persist LLMX session rollouts (.jsonl) so sessions can be replayed or inspected later. use std::fs::File; use std::fs::{self}; diff --git a/llmx-rs/core/src/spawn.rs b/llmx-rs/core/src/spawn.rs index d738f122..6ac79269 100644 --- a/llmx-rs/core/src/spawn.rs +++ b/llmx-rs/core/src/spawn.rs @@ -60,7 +60,7 @@ pub(crate) async fn spawn_child_async( cmd.env(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR, "1"); } - // If this Codex process dies (including being killed via SIGKILL), we want + // If this LLMX process dies (including being killed via SIGKILL), we want // any child processes that were spawned as part of a `"shell"` tool call // to also be terminated. 
@@ -83,9 +83,9 @@ pub(crate) async fn spawn_child_async( } // Though if there was a race condition and this pre_exec() block is - // run _after_ the parent (i.e., the Codex process) has already + // run _after_ the parent (i.e., the LLMX process) has already // exited, then parent will be the closest configured "subreaper" - // ancestor process, or PID 1 (init). If the Codex process has exited + // ancestor process, or PID 1 (init). If the LLMX process has exited // already, so should the child process. if libc::getppid() != parent_pid { libc::raise(libc::SIGTERM); diff --git a/llmx-rs/core/src/tools/spec.rs b/llmx-rs/core/src/tools/spec.rs index cb1aeafd..d2b5e2fe 100644 --- a/llmx-rs/core/src/tools/spec.rs +++ b/llmx-rs/core/src/tools/spec.rs @@ -360,7 +360,7 @@ fn create_test_sync_tool() -> ToolSpec { ToolSpec::Function(ResponsesApiTool { name: "test_sync_tool".to_string(), - description: "Internal synchronization helper used by Codex integration tests.".to_string(), + description: "Internal synchronization helper used by LLMX integration tests.".to_string(), strict: false, parameters: JsonSchema::Object { properties, diff --git a/llmx-rs/core/tests/chat_completions_payload.rs b/llmx-rs/core/tests/chat_completions_payload.rs index 28896949..030b9e9e 100644 --- a/llmx-rs/core/tests/chat_completions_payload.rs +++ b/llmx-rs/core/tests/chat_completions_payload.rs @@ -193,7 +193,7 @@ fn first_assistant(messages: &[Value]) -> &Value { async fn omits_reasoning_when_none_present() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -210,7 +210,7 @@ async fn omits_reasoning_when_none_present() { async fn attaches_reasoning_to_previous_assistant() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." 
+ "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -232,7 +232,7 @@ async fn attaches_reasoning_to_previous_assistant() { async fn attaches_reasoning_to_function_call_anchor() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -259,7 +259,7 @@ async fn attaches_reasoning_to_function_call_anchor() { async fn attaches_reasoning_to_local_shell_call() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -284,7 +284,7 @@ async fn attaches_reasoning_to_local_shell_call() { async fn drops_reasoning_when_last_role_is_user() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -303,7 +303,7 @@ async fn drops_reasoning_when_last_role_is_user() { async fn ignores_reasoning_before_last_user() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -323,7 +323,7 @@ async fn ignores_reasoning_before_last_user() { async fn skips_empty_reasoning_segments() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." 
); return; } @@ -344,7 +344,7 @@ async fn skips_empty_reasoning_segments() { async fn suppresses_duplicate_assistant_messages() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } diff --git a/llmx-rs/core/tests/chat_completions_sse.rs b/llmx-rs/core/tests/chat_completions_sse.rs index 16ba1260..39024d7b 100644 --- a/llmx-rs/core/tests/chat_completions_sse.rs +++ b/llmx-rs/core/tests/chat_completions_sse.rs @@ -159,7 +159,7 @@ fn assert_reasoning(item: &ResponseItem, expected: &str) { async fn streams_text_without_reasoning() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -195,7 +195,7 @@ async fn streams_text_without_reasoning() { async fn streams_reasoning_from_string_delta() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -246,7 +246,7 @@ async fn streams_reasoning_from_string_delta() { async fn streams_reasoning_from_object_delta() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -303,7 +303,7 @@ async fn streams_reasoning_from_object_delta() { async fn streams_reasoning_from_final_message() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." 
); return; } @@ -335,7 +335,7 @@ async fn streams_reasoning_from_final_message() { async fn streams_reasoning_before_tool_call() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -385,7 +385,7 @@ async fn streams_reasoning_before_tool_call() { async fn chat_sse_emits_failed_on_parse_error() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -422,7 +422,7 @@ async fn chat_sse_emits_failed_on_parse_error() { async fn chat_sse_done_chunk_emits_event() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -445,7 +445,7 @@ async fn chat_sse_done_chunk_emits_event() { async fn chat_sse_emits_error_on_invalid_utf8() { if network_disabled() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } diff --git a/llmx-rs/core/tests/common/lib.rs b/llmx-rs/core/tests/common/lib.rs index 0964a905..23a1b0b2 100644 --- a/llmx-rs/core/tests/common/lib.rs +++ b/llmx-rs/core/tests/common/lib.rs @@ -346,7 +346,7 @@ macro_rules! skip_if_no_network { () => {{ if ::std::env::var($crate::sandbox_network_env_var()).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -354,7 +354,7 @@ macro_rules! skip_if_no_network { ($return_value:expr $(,)?) 
=> {{ if ::std::env::var($crate::sandbox_network_env_var()).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return $return_value; } diff --git a/llmx-rs/core/tests/suite/cli_stream.rs b/llmx-rs/core/tests/suite/cli_stream.rs index de9ebb05..1b243d40 100644 --- a/llmx-rs/core/tests/suite/cli_stream.rs +++ b/llmx-rs/core/tests/suite/cli_stream.rs @@ -126,7 +126,7 @@ async fn exec_cli_applies_experimental_instructions_file() { let custom_path_str = custom_path.to_string_lossy().replace('\\', "/"); // Build a provider override that points at the mock server and instructs - // Codex to use the Responses API with the dummy env var. + // LLMX to use the Responses API with the dummy env var. let provider_override = format!( "model_providers.mock={{ name = \"mock\", base_url = \"{}/v1\", env_key = \"PATH\", wire_api = \"responses\" }}", server.uri() diff --git a/llmx-rs/core/tests/suite/client.rs b/llmx-rs/core/tests/suite/client.rs index 2001504b..50eb4042 100644 --- a/llmx-rs/core/tests/suite/client.rs +++ b/llmx-rs/core/tests/suite/client.rs @@ -240,7 +240,7 @@ async fn resume_includes_initial_messages_and_sends_prior_items() { responses::mount_sse_once_match(&server, path("/v1/responses"), sse_completed("resp1")) .await; - // Configure Codex to resume from our file + // Configure LLMX to resume from our file let model_provider = ModelProviderInfo { base_url: Some(format!("{}/v1", server.uri())), ..built_in_model_providers()["openai"].clone() @@ -1325,7 +1325,7 @@ fn create_dummy_codex_auth() -> CodexAuth { /// We assert that the `input` sent on each turn contains the expected conversation history #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn history_dedupes_streamed_and_final_messages_across_turns() { - // Skip under Codex sandbox network restrictions (mirrors other tests). 
+ // Skip under LLMX sandbox network restrictions (mirrors other tests). skip_if_no_network!(); // Mock server that will receive three sequential requests and return the same SSE stream diff --git a/llmx-rs/core/tests/suite/compact.rs b/llmx-rs/core/tests/suite/compact.rs index de7faa0d..4c7eddf1 100644 --- a/llmx-rs/core/tests/suite/compact.rs +++ b/llmx-rs/core/tests/suite/compact.rs @@ -122,7 +122,7 @@ async fn summarize_context_three_requests_and_instructions() { }; let third_request_mock = mount_sse_once_match(&server, third_matcher, sse3).await; - // Build config pointing to the mock server and spawn Codex. + // Build config pointing to the mock server and spawn LLMX. let model_provider = ModelProviderInfo { base_url: Some(format!("{}/v1", server.uri())), ..built_in_model_providers()["openai"].clone() @@ -257,7 +257,7 @@ async fn summarize_context_three_requests_and_instructions() { "third request should not include the summarize trigger" ); - // Shut down Codex to flush rollout entries before inspecting the file. + // Shut down LLMX to flush rollout entries before inspecting the file. codex.submit(Op::Shutdown).await.unwrap(); wait_for_event(&codex, |ev| matches!(ev, EventMsg::ShutdownComplete)).await; diff --git a/llmx-rs/core/tests/suite/compact_resume_fork.rs b/llmx-rs/core/tests/suite/compact_resume_fork.rs index 808d55ad..62c46cd5 100644 --- a/llmx-rs/core/tests/suite/compact_resume_fork.rs +++ b/llmx-rs/core/tests/suite/compact_resume_fork.rs @@ -4,7 +4,7 @@ //! //! Each test sets up a mocked SSE conversation and drives the conversation through //! a specific sequence of operations. After every operation we capture the -//! request payload that Codex would send to the model and assert that the +//! request payload that LLMX would send to the model and assert that the //! model-visible history matches the expected sequence of messages. 
use super::compact::COMPACT_WARNING_MESSAGE; diff --git a/llmx-rs/core/tests/suite/fork_conversation.rs b/llmx-rs/core/tests/suite/fork_conversation.rs index cb4e2010..1742ca44 100644 --- a/llmx-rs/core/tests/suite/fork_conversation.rs +++ b/llmx-rs/core/tests/suite/fork_conversation.rs @@ -44,7 +44,7 @@ async fn fork_conversation_twice_drops_to_first_message() { .mount(&server) .await; - // Configure Codex to use the mock server. + // Configure LLMX to use the mock server. let model_provider = ModelProviderInfo { base_url: Some(format!("{}/v1", server.uri())), ..built_in_model_providers()["openai"].clone() diff --git a/llmx-rs/core/tests/suite/quota_exceeded.rs b/llmx-rs/core/tests/suite/quota_exceeded.rs index 6e55cd62..ae1a0b4b 100644 --- a/llmx-rs/core/tests/suite/quota_exceeded.rs +++ b/llmx-rs/core/tests/suite/quota_exceeded.rs @@ -66,7 +66,7 @@ async fn quota_exceeded_emits_single_error_event() -> Result<()> { } } - assert_eq!(error_events, 1, "expected exactly one Codex:Error event"); + assert_eq!(error_events, 1, "expected exactly one LLMX:Error event"); Ok(()) } diff --git a/llmx-rs/core/tests/suite/review.rs b/llmx-rs/core/tests/suite/review.rs index 16b49915..5745f445 100644 --- a/llmx-rs/core/tests/suite/review.rs +++ b/llmx-rs/core/tests/suite/review.rs @@ -40,7 +40,7 @@ use wiremock::matchers::path; /// in that order when the model returns a structured review JSON payload. #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn review_op_emits_lifecycle_and_review_output() { - // Skip under Codex sandbox network restrictions. + // Skip under LLMX sandbox network restrictions. skip_if_no_network!(); // Start mock Responses API server. 
Return a single assistant message whose diff --git a/llmx-rs/core/tests/suite/undo.rs b/llmx-rs/core/tests/suite/undo.rs index 88579089..969c75b7 100644 --- a/llmx-rs/core/tests/suite/undo.rs +++ b/llmx-rs/core/tests/suite/undo.rs @@ -68,12 +68,12 @@ fn init_git_repo(path: &Path) -> Result<()> { // CI variance (default-branch hints, line ending differences, etc.). git(path, &["init", "--initial-branch=main"])?; git(path, &["config", "core.autocrlf", "false"])?; - git(path, &["config", "user.name", "Codex Tests"])?; + git(path, &["config", "user.name", "LLMX Tests"])?; - git(path, &["config", "user.email", "codex-tests@example.com"])?; + git(path, &["config", "user.email", "llmx-tests@example.com"])?; // Create README.txt let readme_path = path.join("README.txt"); - fs::write(&readme_path, "Test repository initialized by Codex.\n")?; + fs::write(&readme_path, "Test repository initialized by LLMX.\n")?; // Stage and commit git(path, &["add", "README.txt"])?; diff --git a/llmx-rs/core/tests/suite/view_image.rs b/llmx-rs/core/tests/suite/view_image.rs index 86a09152..d6dcc4b8 100644 --- a/llmx-rs/core/tests/suite/view_image.rs +++ b/llmx-rs/core/tests/suite/view_image.rs @@ -382,7 +382,7 @@ async fn view_image_tool_placeholder_for_non_image_files() -> anyhow::Result<()> content.iter().find_map(|span| { if span.get("type").and_then(Value::as_str) == Some("input_text") { let text = span.get("text").and_then(Value::as_str)?; - if text.contains("Codex could not read the local image at") + if text.contains("LLMX could not read the local image at") && text.contains("unsupported MIME type `application/json`") { return Some(text.to_string()); diff --git a/llmx-rs/exec/src/event_processor_with_human_output.rs b/llmx-rs/exec/src/event_processor_with_human_output.rs index ec007dc1..3047eaac 100644 --- a/llmx-rs/exec/src/event_processor_with_human_output.rs +++ b/llmx-rs/exec/src/event_processor_with_human_output.rs @@ -136,7 +136,7 @@ impl EventProcessor for EventProcessorWithHumanOutput { const VERSION: &str = env!("CARGO_PKG_VERSION"); ts_msg!(
self, - "OpenAI Codex v{} (research preview)\n--------", + "LLMX v{} (research preview)\n--------", VERSION ); diff --git a/llmx-rs/exec/src/lib.rs b/llmx-rs/exec/src/lib.rs index f0508c18..be3721f0 100644 --- a/llmx-rs/exec/src/lib.rs +++ b/llmx-rs/exec/src/lib.rs @@ -96,7 +96,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option) -> any // Ensure the user knows we are waiting on stdin, as they may // have gotten into this state by mistake. If so, and they are not - // writing to stdin, Codex will hang indefinitely, so this should + // writing to stdin, LLMX will hang indefinitely, so this should // help them debug in that case. if !force_stdin { eprintln!("Reading prompt from stdin..."); @@ -278,11 +278,11 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option) -> any .new_conversation(config.clone()) .await? }; - // Print the effective configuration and prompt so users can see what Codex + // Print the effective configuration and prompt so users can see what LLMX // is using. event_processor.print_config_summary(&config, &prompt, &session_configured); - info!("Codex initialized with event: {session_configured:?}"); + info!("LLMX initialized with event: {session_configured:?}"); let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::(); { @@ -292,7 +292,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option) -> any tokio::select! { _ = tokio::signal::ctrl_c() => { tracing::debug!("Keyboard interrupt"); - // Immediately notify Codex to abort any in‑flight task. + // Immediately notify LLMX to abort any in‑flight task. conversation.submit(Op::Interrupt).await.ok(); // Exit the inner loop and return to the main input prompt. 
The codex diff --git a/llmx-rs/feedback/src/lib.rs b/llmx-rs/feedback/src/lib.rs index f089da2e..f5362924 100644 --- a/llmx-rs/feedback/src/lib.rs +++ b/llmx-rs/feedback/src/lib.rs @@ -214,7 +214,7 @@ impl CodexLogSnapshot { let mut envelope = Envelope::new(); let title = format!( - "[{}]: Codex session {}", + "[{}]: LLMX session {}", display_classification(classification), self.thread_id ); diff --git a/llmx-rs/login/src/device_code_auth.rs b/llmx-rs/login/src/device_code_auth.rs index acaf30ba..98db87ad 100644 --- a/llmx-rs/login/src/device_code_auth.rs +++ b/llmx-rs/login/src/device_code_auth.rs @@ -75,7 +75,7 @@ async fn request_user_code( let status = resp.status(); if status == StatusCode::NOT_FOUND { return Err(std::io::Error::other( - "device code login is not enabled for this Codex server. Use the browser login or verify the server URL.", + "device code login is not enabled for this LLMX server. Use the browser login or verify the server URL.", )); } diff --git a/llmx-rs/mcp-server/src/exec_approval.rs b/llmx-rs/mcp-server/src/exec_approval.rs index fee9ba45..36e1c3b5 100644 --- a/llmx-rs/mcp-server/src/exec_approval.rs +++ b/llmx-rs/mcp-server/src/exec_approval.rs @@ -67,7 +67,7 @@ pub(crate) async fn handle_exec_approval_request( let escaped_command = shlex::try_join(command.iter().map(String::as_str)).unwrap_or_else(|_| command.join(" ")); let message = format!( - "Allow Codex to run `{escaped_command}` in `{cwd}`?", + "Allow LLMX to run `{escaped_command}` in `{cwd}`?", cwd = cwd.to_string_lossy() ); diff --git a/llmx-rs/mcp-server/src/llmx_tool_config.rs b/llmx-rs/mcp-server/src/llmx_tool_config.rs index 8787271e..de34f905 100644 --- a/llmx-rs/mcp-server/src/llmx_tool_config.rs +++ b/llmx-rs/mcp-server/src/llmx_tool_config.rs @@ -16,7 +16,7 @@ use std::path::PathBuf; #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)] #[serde(rename_all = "kebab-case")] pub struct CodexToolCallParam { - /// The *initial user prompt* to start the Codex 
conversation. + /// The *initial user prompt* to start the LLMX conversation. pub prompt: String, /// Optional override for the model name (e.g. "o3", "o4-mini"). @@ -113,7 +113,7 @@ pub(crate) fn create_tool_for_codex_tool_call_param() -> Tool { #[expect(clippy::expect_used)] let schema_value = - serde_json::to_value(&schema).expect("Codex tool schema should serialise to JSON"); + serde_json::to_value(&schema).expect("LLMX tool schema should serialise to JSON"); let tool_input_schema = serde_json::from_value::(schema_value).unwrap_or_else(|e| { @@ -122,19 +122,19 @@ pub(crate) fn create_tool_for_codex_tool_call_param() -> Tool { Tool { name: "codex".to_string(), - title: Some("Codex".to_string()), + title: Some("LLMX".to_string()), input_schema: tool_input_schema, // TODO(mbolin): This should be defined. output_schema: None, description: Some( - "Run a Codex session. Accepts configuration parameters matching the Codex Config struct.".to_string(), + "Run an LLMX session. Accepts configuration parameters matching the LLMX Config struct.".to_string(), ), annotations: None, } } impl CodexToolCallParam { - /// Returns the initial user prompt to start the Codex conversation and the + /// Returns the initial user prompt to start the LLMX conversation and the /// effective Config object generated from the supplied parameters. pub async fn into_config( self, @@ -189,10 +189,10 @@ impl CodexToolCallParam { #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "camelCase")] pub struct CodexToolCallReplyParam { - /// The conversation id for this Codex session. + /// The conversation id for this LLMX session. pub conversation_id: String, - /// The *next user prompt* to continue the Codex conversation. + /// The *next user prompt* to continue the LLMX conversation. 
pub prompt: String, } @@ -208,7 +208,7 @@ pub(crate) fn create_tool_for_codex_tool_call_reply_param() -> Tool { #[expect(clippy::expect_used)] let schema_value = - serde_json::to_value(&schema).expect("Codex reply tool schema should serialise to JSON"); + serde_json::to_value(&schema).expect("LLMX reply tool schema should serialise to JSON"); let tool_input_schema = serde_json::from_value::(schema_value).unwrap_or_else(|e| { @@ -217,11 +217,11 @@ pub(crate) fn create_tool_for_codex_tool_call_reply_param() -> Tool { Tool { name: "codex-reply".to_string(), - title: Some("Codex Reply".to_string()), + title: Some("LLMX Reply".to_string()), input_schema: tool_input_schema, output_schema: None, description: Some( - "Continue a Codex conversation by providing the conversation id and prompt." + "Continue an LLMX conversation by providing the conversation id and prompt." .to_string(), ), annotations: None, @@ -250,8 +250,8 @@ mod tests { let tool_json = serde_json::to_value(&tool).expect("tool serializes"); let expected_tool_json = serde_json::json!({ "name": "codex", - "title": "Codex", - "description": "Run a Codex session. Accepts configuration parameters matching the Codex Config struct.", + "title": "LLMX", + "description": "Run an LLMX session. 
Accepts configuration parameters matching the LLMX Config struct.", "inputSchema": { "type": "object", "properties": { @@ -292,7 +292,7 @@ mod tests { "type": "string" }, "prompt": { - "description": "The *initial user prompt* to start the Codex conversation.", + "description": "The *initial user prompt* to start the LLMX conversation.", "type": "string" }, "base-instructions": { @@ -321,15 +321,15 @@ mod tests { let tool = create_tool_for_codex_tool_call_reply_param(); let tool_json = serde_json::to_value(&tool).expect("tool serializes"); let expected_tool_json = serde_json::json!({ - "description": "Continue a Codex conversation by providing the conversation id and prompt.", + "description": "Continue an LLMX conversation by providing the conversation id and prompt.", "inputSchema": { "properties": { "conversationId": { - "description": "The conversation id for this Codex session.", + "description": "The conversation id for this LLMX session.", "type": "string" }, "prompt": { - "description": "The *next user prompt* to continue the Codex conversation.", + "description": "The *next user prompt* to continue the LLMX conversation.", "type": "string" }, }, @@ -340,7 +340,7 @@ mod tests { "type": "object", }, "name": "codex-reply", - "title": "Codex Reply", + "title": "LLMX Reply", }); assert_eq!(expected_tool_json, tool_json); } diff --git a/llmx-rs/mcp-server/src/llmx_tool_runner.rs b/llmx-rs/mcp-server/src/llmx_tool_runner.rs index 522b079f..a5ff3310 100644 --- a/llmx-rs/mcp-server/src/llmx_tool_runner.rs +++ b/llmx-rs/mcp-server/src/llmx_tool_runner.rs @@ -1,4 +1,4 @@ -//! Asynchronous worker that executes a **Codex** tool-call inside a spawned +//! Asynchronous worker that executes an **LLMX** tool-call inside a spawned //! Tokio task. Separated from `message_processor.rs` to keep that file small //! and to make future feature-growth easier to manage. 
@@ -54,7 +54,7 @@ pub async fn run_codex_tool_session( let result = CallToolResult { content: vec![ContentBlock::TextContent(TextContent { r#type: "text".to_string(), - text: format!("Failed to start Codex session: {e}"), + text: format!("Failed to start LLMX session: {e}"), annotations: None, })], is_error: Some(true), @@ -77,7 +77,7 @@ pub async fn run_codex_tool_session( ) .await; - // Use the original MCP request ID as the `sub_id` for the Codex submission so that + // Use the original MCP request ID as the `sub_id` for the LLMX submission so that // any events emitted for this tool-call can be correlated with the // originating `tools/call` request. let sub_id = match &id { @@ -197,7 +197,7 @@ async fn run_codex_tool_session_inner( continue; } EventMsg::Error(err_event) => { - // Return a response to conclude the tool call when the Codex session reports an error (e.g., interruption). + // Return a response to conclude the tool call when the LLMX session reports an error (e.g., interruption). 
let result = json!({ "error": err_event.message, }); @@ -311,7 +311,7 @@ async fn run_codex_tool_session_inner( let result = CallToolResult { content: vec![ContentBlock::TextContent(TextContent { r#type: "text".to_string(), - text: format!("Codex runtime error: {e}"), + text: format!("LLMX runtime error: {e}"), annotations: None, })], is_error: Some(true), diff --git a/llmx-rs/mcp-server/src/message_processor.rs b/llmx-rs/mcp-server/src/message_processor.rs index dfee4874..d9c9fe04 100644 --- a/llmx-rs/mcp-server/src/message_processor.rs +++ b/llmx-rs/mcp-server/src/message_processor.rs @@ -219,7 +219,7 @@ impl MessageProcessor { server_info: mcp_types::Implementation { name: "codex-mcp-server".to_string(), version: env!("CARGO_PKG_VERSION").to_string(), - title: Some("Codex".to_string()), + title: Some("LLMX".to_string()), user_agent: Some(get_codex_user_agent()), }, }; @@ -356,7 +356,7 @@ impl MessageProcessor { content: vec![ContentBlock::TextContent(TextContent { r#type: "text".to_owned(), text: format!( - "Failed to load Codex configuration from overrides: {e}" + "Failed to load LLMX configuration from overrides: {e}" ), annotations: None, })], @@ -372,7 +372,7 @@ impl MessageProcessor { let result = CallToolResult { content: vec![ContentBlock::TextContent(TextContent { r#type: "text".to_owned(), - text: format!("Failed to parse configuration for Codex tool: {e}"), + text: format!("Failed to parse configuration for LLMX tool: {e}"), annotations: None, })], is_error: Some(true), @@ -406,10 +406,10 @@ impl MessageProcessor { let conversation_manager = self.conversation_manager.clone(); let running_requests_id_to_codex_uuid = self.running_requests_id_to_codex_uuid.clone(); - // Spawn an async task to handle the Codex session so that we do not + // Spawn an async task to handle the LLMX session so that we do not // block the synchronous message-processing loop. task::spawn(async move { - // Run the Codex session and stream events back to the client. 
+ // Run the LLMX session and stream events back to the client. crate::llmx_tool_runner::run_codex_tool_session( id, initial_prompt, @@ -437,11 +437,11 @@ impl MessageProcessor { Some(json_val) => match serde_json::from_value::(json_val) { Ok(params) => params, Err(e) => { - tracing::error!("Failed to parse Codex tool call reply parameters: {e}"); + tracing::error!("Failed to parse LLMX tool call reply parameters: {e}"); let result = CallToolResult { content: vec![ContentBlock::TextContent(TextContent { r#type: "text".to_owned(), - text: format!("Failed to parse configuration for Codex tool: {e}"), + text: format!("Failed to parse configuration for LLMX tool: {e}"), annotations: None, })], is_error: Some(true), @@ -577,7 +577,7 @@ impl MessageProcessor { }; tracing::info!("conversation_id: {conversation_id}"); - // Obtain the Codex conversation from the server. + // Obtain the LLMX conversation from the server. let codex_arc = match self .conversation_manager .get_conversation(conversation_id) @@ -590,7 +590,7 @@ impl MessageProcessor { } }; - // Submit interrupt to Codex. + // Submit interrupt to LLMX. 
let err = codex_arc .submit_with_id(Submission { id: request_id_string, @@ -598,7 +598,7 @@ impl MessageProcessor { }) .await; if let Err(e) = err { - tracing::error!("Failed to submit interrupt to Codex: {e}"); + tracing::error!("Failed to submit interrupt to LLMX: {e}"); return; } // unregister the id so we don't keep it in the map diff --git a/llmx-rs/mcp-server/src/patch_approval.rs b/llmx-rs/mcp-server/src/patch_approval.rs index a24861dc..28e1af1c 100644 --- a/llmx-rs/mcp-server/src/patch_approval.rs +++ b/llmx-rs/mcp-server/src/patch_approval.rs @@ -56,7 +56,7 @@ pub(crate) async fn handle_patch_approval_request( if let Some(r) = &reason { message_lines.push(r.clone()); } - message_lines.push("Allow Codex to apply proposed code changes?".to_string()); + message_lines.push("Allow LLMX to apply proposed code changes?".to_string()); let params = PatchApprovalElicitRequestParams { message: message_lines.join("\n"), diff --git a/llmx-rs/mcp-server/tests/common/mcp_process.rs b/llmx-rs/mcp-server/tests/common/mcp_process.rs index 08c6ef24..1006d212 100644 --- a/llmx-rs/mcp-server/tests/common/mcp_process.rs +++ b/llmx-rs/mcp-server/tests/common/mcp_process.rs @@ -162,7 +162,7 @@ impl McpProcess { }, "serverInfo": { "name": "codex-mcp-server", - "title": "Codex", + "title": "LLMX", "version": "0.0.0", "user_agent": user_agent }, diff --git a/llmx-rs/mcp-server/tests/suite/llmx_tool.rs b/llmx-rs/mcp-server/tests/suite/llmx_tool.rs index e871902d..a9047d1b 100644 --- a/llmx-rs/mcp-server/tests/suite/llmx_tool.rs +++ b/llmx-rs/mcp-server/tests/suite/llmx_tool.rs @@ -42,7 +42,7 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs async fn test_shell_command_approval_triggers_elicitation() { if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." 
); return; } @@ -173,7 +173,7 @@ fn create_expected_elicitation_request( codex_event_id: String, ) -> anyhow::Result { let expected_message = format!( - "Allow Codex to run `{}` in `{}`?", + "Allow LLMX to run `{}` in `{}`?", shlex::try_join(command.iter().map(std::convert::AsRef::as_ref))?, workdir.to_string_lossy() ); @@ -207,7 +207,7 @@ fn create_expected_elicitation_request( async fn test_patch_approval_triggers_elicitation() { if env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() { println!( - "Skipping test because it cannot execute when network is disabled in a Codex sandbox." + "Skipping test because it cannot execute when network is disabled in an LLMX sandbox." ); return; } @@ -405,7 +405,7 @@ fn create_expected_patch_approval_elicitation_request( if let Some(r) = &reason { message_lines.push(r.clone()); } - message_lines.push("Allow Codex to apply proposed code changes?".to_string()); + message_lines.push("Allow LLMX to apply proposed code changes?".to_string()); Ok(JSONRPCRequest { jsonrpc: JSONRPC_VERSION.into(), diff --git a/llmx-rs/mcp-types/generate_mcp_types.py b/llmx-rs/mcp-types/generate_mcp_types.py index 7535b4c7..3ef9d58a 100755 --- a/llmx-rs/mcp-types/generate_mcp_types.py +++ b/llmx-rs/mcp-types/generate_mcp_types.py @@ -385,7 +385,7 @@ def define_struct( else: fields.append(StructField("pub", rs_prop.name, prop_type, rs_prop.serde, rs_prop.ts)) - # Special-case: add Codex-specific user_agent to Implementation + # Special-case: add LLMX-specific user_agent to Implementation if name == "Implementation": fields.append( StructField( @@ -394,7 +394,7 @@ def define_struct( "Option", '#[serde(default, skip_serializing_if = "Option::is_none")]', '#[ts(optional)]', - "This is an extra field that the Codex MCP server sends as part of InitializeResult.", + "This is an extra field that the LLMX MCP server sends as part of InitializeResult.", ) ) diff --git a/llmx-rs/mcp-types/src/lib.rs b/llmx-rs/mcp-types/src/lib.rs index e390322f..f43c336e 
100644 --- a/llmx-rs/mcp-types/src/lib.rs +++ b/llmx-rs/mcp-types/src/lib.rs @@ -527,7 +527,7 @@ pub struct Implementation { #[ts(optional)] pub title: Option, pub version: String, - // This is an extra field that the Codex MCP server sends as part of InitializeResult. + // This is an extra field that the LLMX MCP server sends as part of InitializeResult. #[serde(default, skip_serializing_if = "Option::is_none")] #[ts(optional)] pub user_agent: Option, diff --git a/llmx-rs/process-hardening/src/lib.rs b/llmx-rs/process-hardening/src/lib.rs index a787b409..61f32b44 100644 --- a/llmx-rs/process-hardening/src/lib.rs +++ b/llmx-rs/process-hardening/src/lib.rs @@ -38,7 +38,7 @@ pub(crate) fn pre_main_hardening_linux() { // For "defense in depth," set the core file size limit to 0. set_core_file_size_limit_to_zero(); - // Official Codex releases are MUSL-linked, which means that variables such + // Official LLMX releases are MUSL-linked, which means that variables such // as LD_PRELOAD are ignored anyway, but just to be sure, clear them here. let ld_keys: Vec = std::env::vars() .filter_map(|(key, _)| { diff --git a/llmx-rs/protocol/src/models.rs b/llmx-rs/protocol/src/models.rs index a902b054..7cd980ba 100644 --- a/llmx-rs/protocol/src/models.rs +++ b/llmx-rs/protocol/src/models.rs @@ -151,7 +151,7 @@ fn local_image_error_placeholder( ) -> ContentItem { ContentItem::InputText { text: format!( - "Codex could not read the local image at `{}`: {}", + "LLMX could not read the local image at `{}`: {}", path.display(), error ), diff --git a/llmx-rs/protocol/src/protocol.rs b/llmx-rs/protocol/src/protocol.rs index 2c60046c..4e6084dd 100644 --- a/llmx-rs/protocol/src/protocol.rs +++ b/llmx-rs/protocol/src/protocol.rs @@ -1,4 +1,4 @@ -//! Defines the protocol for a Codex session between a client and an agent. +//! Defines the protocol for an LLMX session between a client and an agent. //! //! 
Uses a SQ (Submission Queue) / EQ (Event Queue) pattern to asynchronously communicate //! between user and agent. @@ -45,7 +45,7 @@ pub const USER_INSTRUCTIONS_OPEN_TAG: &str = ""; pub const USER_INSTRUCTIONS_CLOSE_TAG: &str = ""; pub const ENVIRONMENT_CONTEXT_OPEN_TAG: &str = ""; pub const ENVIRONMENT_CONTEXT_CLOSE_TAG: &str = ""; -pub const USER_MESSAGE_BEGIN: &str = "## My request for Codex:"; +pub const USER_MESSAGE_BEGIN: &str = "## My request for LLMX:"; /// Submission Queue Entry - requests from user #[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] diff --git a/llmx-rs/rmcp-client/src/bin/test_stdio_server.rs b/llmx-rs/rmcp-client/src/bin/test_stdio_server.rs index aafba593..8146dbab 100644 --- a/llmx-rs/rmcp-client/src/bin/test_stdio_server.rs +++ b/llmx-rs/rmcp-client/src/bin/test_stdio_server.rs @@ -103,7 +103,7 @@ impl TestToolServer { let raw = RawResourceTemplate { uri_template: "memo://codex/{slug}".to_string(), name: "codex-memo".to_string(), - title: Some("Codex Memo".to_string()), + title: Some("LLMX Memo".to_string()), description: Some( "Template for memo://codex/{slug} resources used in tests.".to_string(), ), diff --git a/llmx-rs/rmcp-client/src/bin/test_streamable_http_server.rs b/llmx-rs/rmcp-client/src/bin/test_streamable_http_server.rs index f56a8582..99c145ed 100644 --- a/llmx-rs/rmcp-client/src/bin/test_streamable_http_server.rs +++ b/llmx-rs/rmcp-client/src/bin/test_streamable_http_server.rs @@ -100,7 +100,7 @@ impl TestToolServer { let raw = RawResourceTemplate { uri_template: "memo://codex/{slug}".to_string(), name: "codex-memo".to_string(), - title: Some("Codex Memo".to_string()), + title: Some("LLMX Memo".to_string()), description: Some( "Template for memo://codex/{slug} resources used in tests.".to_string(), ), diff --git a/llmx-rs/rmcp-client/src/oauth.rs b/llmx-rs/rmcp-client/src/oauth.rs index c93e68aa..5914a9d1 100644 --- a/llmx-rs/rmcp-client/src/oauth.rs +++ b/llmx-rs/rmcp-client/src/oauth.rs @@ -49,7 +49,7 @@ use 
tokio::sync::Mutex; use crate::find_llmx_home::find_llmx_home; -const KEYRING_SERVICE: &str = "Codex MCP Credentials"; +const KEYRING_SERVICE: &str = "LLMX MCP Credentials"; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct StoredOAuthTokens { diff --git a/llmx-rs/rmcp-client/src/perform_oauth_login.rs b/llmx-rs/rmcp-client/src/perform_oauth_login.rs index 425e124d..e2452481 100644 --- a/llmx-rs/rmcp-client/src/perform_oauth_login.rs +++ b/llmx-rs/rmcp-client/src/perform_oauth_login.rs @@ -64,7 +64,7 @@ pub async fn perform_oauth_login( let mut oauth_state = OAuthState::new(server_url, Some(http_client)).await?; let scope_refs: Vec<&str> = scopes.iter().map(String::as_str).collect(); oauth_state - .start_authorization(&scope_refs, &redirect_uri, Some("Codex")) + .start_authorization(&scope_refs, &redirect_uri, Some("LLMX")) .await?; let auth_url = oauth_state.get_authorization_url().await?; diff --git a/llmx-rs/rmcp-client/tests/resources.rs b/llmx-rs/rmcp-client/tests/resources.rs index 6fc1ff37..f636f2a0 100644 --- a/llmx-rs/rmcp-client/tests/resources.rs +++ b/llmx-rs/rmcp-client/tests/resources.rs @@ -36,7 +36,7 @@ fn init_params() -> InitializeRequestParams { client_info: Implementation { name: "codex-test".into(), version: "0.0.0-test".into(), - title: Some("Codex rmcp resource test".into()), + title: Some("LLMX rmcp resource test".into()), user_agent: None, }, protocol_version: mcp_types::MCP_SCHEMA_VERSION.to_string(), @@ -92,7 +92,7 @@ async fn rmcp_client_can_list_and_read_resources() -> anyhow::Result<()> { ), mime_type: Some("text/plain".to_string()), name: "codex-memo".to_string(), - title: Some("Codex Memo".to_string()), + title: Some("LLMX Memo".to_string()), uri_template: "memo://codex/{slug}".to_string(), }], } diff --git a/llmx-rs/tui/src/bottom_pane/approval_overlay.rs b/llmx-rs/tui/src/bottom_pane/approval_overlay.rs index 68277c98..e47badd9 100644 --- a/llmx-rs/tui/src/bottom_pane/approval_overlay.rs +++ 
b/llmx-rs/tui/src/bottom_pane/approval_overlay.rs @@ -399,7 +399,7 @@ fn exec_options() -> Vec { additional_shortcuts: vec![key_hint::plain(KeyCode::Char('a'))], }, ApprovalOption { - label: "No, and tell Codex what to do differently".to_string(), + label: "No, and tell LLMX what to do differently".to_string(), decision: ReviewDecision::Abort, display_shortcut: Some(key_hint::plain(KeyCode::Esc)), additional_shortcuts: vec![key_hint::plain(KeyCode::Char('n'))], @@ -416,7 +416,7 @@ fn patch_options() -> Vec { additional_shortcuts: vec![key_hint::plain(KeyCode::Char('y'))], }, ApprovalOption { - label: "No, and tell Codex what to do differently".to_string(), + label: "No, and tell LLMX what to do differently".to_string(), decision: ReviewDecision::Abort, display_shortcut: Some(key_hint::plain(KeyCode::Esc)), additional_shortcuts: vec![key_hint::plain(KeyCode::Char('n'))], diff --git a/llmx-rs/tui/src/bottom_pane/chat_composer.rs b/llmx-rs/tui/src/bottom_pane/chat_composer.rs index 567da98e..23270383 100644 --- a/llmx-rs/tui/src/bottom_pane/chat_composer.rs +++ b/llmx-rs/tui/src/bottom_pane/chat_composer.rs @@ -1706,7 +1706,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -1766,7 +1766,7 @@ mod tests { true, sender, enhanced_keys_supported, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); setup(&mut composer); @@ -1845,7 +1845,7 @@ mod tests { true, sender, true, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -1870,7 +1870,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -1896,7 +1896,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -1937,7 +1937,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + 
"Ask LLMX to do anything".to_string(), false, ); @@ -2111,7 +2111,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2140,7 +2140,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2169,7 +2169,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2196,7 +2196,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2231,7 +2231,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2272,7 +2272,7 @@ mod tests { true, sender.clone(), false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2315,7 +2315,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2343,7 +2343,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); type_chars_humanlike(&mut composer, &['/', 'm', 'o']); @@ -2386,7 +2386,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2439,7 +2439,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2460,7 +2460,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2496,7 +2496,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2531,7 +2531,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to 
do anything".to_string(), false, ); @@ -2610,7 +2610,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2682,7 +2682,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2730,7 +2730,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); let path = PathBuf::from("/tmp/image1.png"); @@ -2754,7 +2754,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); let path = PathBuf::from("/tmp/image2.png"); @@ -2779,7 +2779,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); let path = PathBuf::from("/tmp/image3.png"); @@ -2820,7 +2820,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2851,7 +2851,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2908,7 +2908,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2935,7 +2935,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -2971,7 +2971,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3005,7 +3005,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3043,7 +3043,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3079,7 +3079,7 @@ mod tests { true, sender, 
false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3109,7 +3109,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3159,7 +3159,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3212,7 +3212,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3249,7 +3249,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3280,7 +3280,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3316,7 +3316,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3353,7 +3353,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3391,7 +3391,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3435,7 +3435,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); @@ -3467,7 +3467,7 @@ mod tests { true, sender, false, - "Ask Codex to do anything".to_string(), + "Ask LLMX to do anything".to_string(), false, ); diff --git a/llmx-rs/tui/src/bottom_pane/feedback_view.rs b/llmx-rs/tui/src/bottom_pane/feedback_view.rs index 2caab67d..60458abf 100644 --- a/llmx-rs/tui/src/bottom_pane/feedback_view.rs +++ b/llmx-rs/tui/src/bottom_pane/feedback_view.rs @@ -422,7 +422,7 @@ pub(crate) fn feedback_upload_consent_params( super::SelectionItem { name: "Yes".to_string(), description: Some( - "Share the current Codex session logs with 
the team for troubleshooting." + "Share the current LLMX session logs with the team for troubleshooting." .to_string(), ), actions: vec![yes_action], diff --git a/llmx-rs/tui/src/bottom_pane/list_selection_view.rs b/llmx-rs/tui/src/bottom_pane/list_selection_view.rs index 44d7b264..0f0ca514 100644 --- a/llmx-rs/tui/src/bottom_pane/list_selection_view.rs +++ b/llmx-rs/tui/src/bottom_pane/list_selection_view.rs @@ -441,14 +441,14 @@ mod tests { let items = vec![ SelectionItem { name: "Read Only".to_string(), - description: Some("Codex can read files".to_string()), + description: Some("LLMX can read files".to_string()), is_current: true, dismiss_on_select: true, ..Default::default() }, SelectionItem { name: "Full Access".to_string(), - description: Some("Codex can edit files".to_string()), + description: Some("LLMX can edit files".to_string()), is_current: false, dismiss_on_select: true, ..Default::default() @@ -501,7 +501,7 @@ mod tests { #[test] fn renders_blank_line_between_subtitle_and_items() { - let view = make_selection_view(Some("Switch between Codex approval presets")); + let view = make_selection_view(Some("Switch between LLMX approval presets")); assert_snapshot!("list_selection_spacing_with_subtitle", render_lines(&view)); } @@ -511,7 +511,7 @@ mod tests { let tx = AppEventSender::new(tx_raw); let items = vec![SelectionItem { name: "Read Only".to_string(), - description: Some("Codex can read files".to_string()), + description: Some("LLMX can read files".to_string()), is_current: false, dismiss_on_select: true, ..Default::default() diff --git a/llmx-rs/tui/src/bottom_pane/mod.rs b/llmx-rs/tui/src/bottom_pane/mod.rs index 5cdd964d..336c6430 100644 --- a/llmx-rs/tui/src/bottom_pane/mod.rs +++ b/llmx-rs/tui/src/bottom_pane/mod.rs @@ -544,7 +544,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do 
anything".to_string(), disable_paste_burst: false, }); pane.push_approval_request(exec_request()); @@ -564,7 +564,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); @@ -595,7 +595,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); @@ -660,7 +660,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); @@ -685,7 +685,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); @@ -714,7 +714,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); @@ -740,7 +740,7 @@ mod tests { frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); diff --git a/llmx-rs/tui/src/chatwidget.rs b/llmx-rs/tui/src/chatwidget.rs index bc07d724..c6419dd2 100644 --- a/llmx-rs/tui/src/chatwidget.rs +++ b/llmx-rs/tui/src/chatwidget.rs @@ -1854,7 
+1854,7 @@ impl ChatWidget { self.bottom_pane.show_selection_view(SelectionViewParams { title: Some("Select Model and Effort".to_string()), - subtitle: Some("Switch the model for this and future Codex CLI sessions".to_string()), + subtitle: Some("Switch the model for this and future LLMX CLI sessions".to_string()), footer_hint: Some("Press enter to select reasoning effort, or esc to dismiss.".into()), items, ..Default::default() @@ -2031,10 +2031,10 @@ impl ChatWidget { let mut header = ColumnRenderable::new(); header.push(line![ - "Codex forced your settings back to Read Only on this Windows machine.".bold() + "LLMX forced your settings back to Read Only on this Windows machine.".bold() ]); header.push(line![ - "To re-enable Auto mode, run Codex inside Windows Subsystem for Linux (WSL) or enable Full Access manually.".dim() + "To re-enable Auto mode, run LLMX inside Windows Subsystem for Linux (WSL) or enable Full Access manually.".dim() ]); Box::new(header) } else { @@ -2201,7 +2201,7 @@ impl ChatWidget { let mut header_children: Vec> = Vec::new(); let title_line = Line::from("Enable full access?").bold(); let info_line = Line::from(vec![ - "When Codex runs with full access, it can edit any file on your computer and run commands with network, without your approval. " + "When LLMX runs with full access, it can edit any file on your computer and run commands with network, without your approval. " .into(), "Exercise caution when enabling full access. This significantly increases the risk of data loss, leaks, or unexpected behavior." 
.fg(Color::Red), @@ -2379,7 +2379,7 @@ impl ChatWidget { header.push(line![ "Auto mode requires Windows Subsystem for Linux (WSL2).".bold() ]); - header.push(line!["Run Codex inside WSL to enable sandboxed commands."]); + header.push(line!["Run LLMX inside WSL to enable sandboxed commands."]); header.push(line![""]); header.push(Paragraph::new(WSL_INSTRUCTIONS).wrap(Wrap { trim: false })); @@ -2777,7 +2777,7 @@ impl Notification { } Notification::EditApprovalRequested { cwd, changes } => { format!( - "Codex wants to edit {}", + "LLMX wants to edit {}", if changes.len() == 1 { #[allow(clippy::unwrap_used)] display_path_for(changes.first().unwrap(), cwd) diff --git a/llmx-rs/tui/src/chatwidget/tests.rs b/llmx-rs/tui/src/chatwidget/tests.rs index 465b98d3..0acdec35 100644 --- a/llmx-rs/tui/src/chatwidget/tests.rs +++ b/llmx-rs/tui/src/chatwidget/tests.rs @@ -283,7 +283,7 @@ fn make_chatwidget_manual() -> ( frame_requester: FrameRequester::test_dummy(), has_input_focus: true, enhanced_keys_supported: false, - placeholder_text: "Ask Codex to do anything".to_string(), + placeholder_text: "Ask LLMX to do anything".to_string(), disable_paste_burst: false, }); let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test")); @@ -941,7 +941,7 @@ fn slash_init_skips_when_project_doc_exists() { match op_rx.try_recv() { Err(TryRecvError::Empty) => {} - other => panic!("expected no Codex op to be sent, got {other:?}"), + other => panic!("expected no LLMX op to be sent, got {other:?}"), } let cells = drain_insert_history(&mut rx); @@ -1466,7 +1466,7 @@ fn approvals_popup_includes_wsl_note_for_auto_mode() { "expected auto preset description to mention WSL requirement only on Windows, popup: {popup}" ); assert_eq!( - popup.contains("Codex forced your settings back to Read Only on this Windows machine."), + popup.contains("LLMX forced your settings back to Read Only on this Windows machine."), cfg!(target_os = "windows") && 
chat.config.forced_auto_mode_downgraded_on_windows, "expected downgrade notice only when auto mode is forced off on Windows, popup: {popup}" ); @@ -1813,7 +1813,7 @@ async fn binary_size_transcript_snapshot() { // // Snapshot test: command approval modal // -// Synthesizes a Codex ExecApprovalRequest event to trigger the approval modal +// Synthesizes an LLMX ExecApprovalRequest event to trigger the approval modal // and snapshots the visual output using the ratatui TestBackend. #[test] fn approval_modal_exec_snapshot() { diff --git a/llmx-rs/tui/src/history_cell.rs b/llmx-rs/tui/src/history_cell.rs index c7a60d9f..4474346b 100644 --- a/llmx-rs/tui/src/history_cell.rs +++ b/llmx-rs/tui/src/history_cell.rs @@ -608,7 +608,7 @@ pub(crate) fn new_session_info( Line::from(vec![ " ".into(), "/init".into(), - " - create an AGENTS.md file with instructions for Codex".dim(), + " - create an AGENTS.md file with instructions for LLMX".dim(), ]), Line::from(vec![ " ".into(), @@ -618,7 +618,7 @@ pub(crate) fn new_session_info( Line::from(vec![ " ".into(), "/approvals".into(), - " - choose what Codex can do without approval".dim(), + " - choose what LLMX can do without approval".dim(), ]), Line::from(vec![ " ".into(), @@ -724,10 +724,10 @@ impl HistoryCell for SessionHeaderHistoryCell { let make_row = |spans: Vec>| Line::from(spans); - // Title line rendered inside the box: ">_ OpenAI Codex (vX)" + // Title line rendered inside the box: ">_ LLMX (vX)" let title_spans: Vec> = vec![ Span::from(">_ ").dim(), - Span::from("OpenAI Codex").bold(), + Span::from("LLMX").bold(), Span::from(" ").dim(), Span::from(format!("(v{})", self.version)).dim(), ]; diff --git a/llmx-rs/tui/src/lib.rs b/llmx-rs/tui/src/lib.rs index 6095b841..e6bf163f 100644 --- a/llmx-rs/tui/src/lib.rs +++ b/llmx-rs/tui/src/lib.rs @@ -198,7 +198,7 @@ pub async fn run_main( // Ensure the file is only readable and writable by the current user. 
// Doing the equivalent to `chmod 600` on Windows is quite a bit more code // and requires the Windows API crates, so we can reconsider that when - // Codex CLI is officially supported on Windows. + // LLMX CLI is officially supported on Windows. #[cfg(unix)] { use std::os::unix::fs::OpenOptionsExt; diff --git a/llmx-rs/tui/src/slash_command.rs b/llmx-rs/tui/src/slash_command.rs index 5dcd7f5f..59250240 100644 --- a/llmx-rs/tui/src/slash_command.rs +++ b/llmx-rs/tui/src/slash_command.rs @@ -37,16 +37,16 @@ impl SlashCommand { match self { SlashCommand::Feedback => "send logs to maintainers", SlashCommand::New => "start a new chat during a conversation", - SlashCommand::Init => "create an AGENTS.md file with instructions for Codex", + SlashCommand::Init => "create an AGENTS.md file with instructions for LLMX", SlashCommand::Compact => "summarize conversation to prevent hitting the context limit", SlashCommand::Review => "review my current changes and find issues", - SlashCommand::Undo => "ask Codex to undo a turn", + SlashCommand::Undo => "ask LLMX to undo a turn", SlashCommand::Quit | SlashCommand::Exit => "exit LLMX", SlashCommand::Diff => "show git diff (including untracked files)", SlashCommand::Mention => "mention a file", SlashCommand::Status => "show current session configuration and token usage", SlashCommand::Model => "choose what model and reasoning effort to use", - SlashCommand::Approvals => "choose what Codex can do without approval", + SlashCommand::Approvals => "choose what LLMX can do without approval", SlashCommand::Mcp => "list configured MCP tools", SlashCommand::Logout => "log out of LLMX", SlashCommand::Rollout => "print the rollout file path", diff --git a/llmx-rs/tui/src/status/card.rs b/llmx-rs/tui/src/status/card.rs index 5545087c..f4f7949b 100644 --- a/llmx-rs/tui/src/status/card.rs +++ b/llmx-rs/tui/src/status/card.rs @@ -267,7 +267,7 @@ impl HistoryCell for StatusHistoryCell { let mut lines: Vec> = Vec::new(); lines.push(Line::from(vec![ 
Span::from(format!("{}>_ ", FieldFormatter::INDENT)).dim(), - Span::from("OpenAI Codex").bold(), + Span::from("LLMX").bold(), Span::from(" ").dim(), Span::from(format!("(v{CODEX_CLI_VERSION})")).dim(), ])); diff --git a/llmx-rs/windows-sandbox-rs/sandbox_smoketests.py b/llmx-rs/windows-sandbox-rs/sandbox_smoketests.py index 097b0b89..2de2fc6f 100644 --- a/llmx-rs/windows-sandbox-rs/sandbox_smoketests.py +++ b/llmx-rs/windows-sandbox-rs/sandbox_smoketests.py @@ -1,5 +1,5 @@ # sandbox_smoketests.py -# Run a suite of smoke tests against the Windows sandbox via the Codex CLI +# Run a suite of smoke tests against the Windows sandbox via the LLMX CLI # Requires: Python 3.8+ on Windows. No pip requirements. import os @@ -10,10 +10,10 @@ from pathlib import Path from typing import List, Optional, Tuple def _resolve_codex_cmd() -> List[str]: - """Resolve the Codex CLI to invoke `codex sandbox windows`. + """Resolve the LLMX CLI to invoke `codex sandbox windows`. Prefer `codex` on PATH; if not found, try common local build locations. - Returns the argv prefix to run Codex. + Returns the argv prefix to run LLMX. """ # 1) Prefer PATH try: @@ -44,7 +44,7 @@ def _resolve_codex_cmd() -> List[str]: return [str(p)] raise FileNotFoundError( - "Codex CLI not found. Build it first, e.g.\n" + "LLMX CLI not found. Build it first, e.g.\n" " cargo build -p codex-cli --release\n" "or for debug:\n" " cargo build -p codex-cli\n" diff --git a/scripts/stage_npm_packages.py b/scripts/stage_npm_packages.py index 1b71e572..de322979 100755 --- a/scripts/stage_npm_packages.py +++ b/scripts/stage_npm_packages.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -"""Stage one or more Codex npm packages for release.""" +"""Stage one or more LLMX npm packages for release.""" from __future__ import annotations