Phase 6: Testing & Validation - Additional Fixes

Fixed remaining references found during testing:

Rust source code fixes:
- Updated CLI bin_name and override_usage: codex → llmx (see the sketch after this list)
- Updated test examples in wsl_paths.rs
- Updated GitHub URLs: github.com/openai/codex → github.com/valknar/llmx
- Updated directory references: ~/.codex/ → ~/.llmx/
- Updated documentation link: "Codex docs" → "LLMX docs"
- Updated feedback URL to point to valknar/llmx repository
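
The net effect on the clap attributes, condensed from the `MultitoolCli` hunk further below (only the renamed pieces are shown; the real attribute list carries more context):

```rust
use clap::Parser;

/// LLMX CLI
///
/// If no subcommand is specified, options will be forwarded to the interactive CLI.
#[derive(Debug, Parser)]
#[clap(
    subcommand_negates_reqs = true,
    // Help output always uses the generic `llmx` name, even when the binary is
    // invoked via a platform-specific name like `llmx-x86_64-unknown-linux-musl`.
    bin_name = "llmx",
    override_usage = "llmx [OPTIONS] [PROMPT]\n llmx [OPTIONS] <COMMAND> [ARGS]"
)]
struct MultitoolCli {
    // Fields and subcommands are unchanged by the rename and omitted here.
}
```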

Configuration files:
- Regenerated llmx-cli/package-lock.json with updated package name
- Updated pnpm-lock.yaml

Test results:
- TypeScript SDK build: ✓ Success
- TypeScript lint: ✓ Pass
- Rust tests: 12/13 passed (1 locale-specific test failure unrelated to rename)
- Rust release build: In progress

Files changed: 22 files (49 insertions, 46 deletions)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: Sebastian Krüger
Date: 2025-11-11 15:03:50 +01:00
Commit: 7be8b00b05 (parent: c493ea1347)

View File

@@ -1,14 +1,15 @@
{
"name": "@openai/codex",
"version": "0.0.0-dev",
"name": "@llmx/llmx",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@openai/codex",
"version": "0.0.0-dev",
"name": "@llmx/llmx",
"version": "0.1.0",
"license": "Apache-2.0",
"bin": {
"codex": "bin/codex.js"
"llmx": "bin/llmx.js"
},
"engines": {
"node": ">=16"

View File

@@ -72,7 +72,7 @@ pub fn arg0_dispatch() -> Option<TempDir> {
/// `codex-linux-sandbox` we *directly* execute
/// [`llmx_linux_sandbox::run_main`] (which never returns). Otherwise we:
///
-/// 1. Load `.env` values from `~/.codex/.env` before creating any threads.
+/// 1. Load `.env` values from `~/.llmx/.env` before creating any threads.
/// 2. Construct a Tokio multi-thread runtime.
/// 3. Derive the path to the current executable (so children can re-invoke the
/// sandbox) when running on Linux.
@@ -109,7 +109,7 @@ where
const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";
-/// Load env vars from ~/.codex/.env.
+/// Load env vars from ~/.llmx/.env.
///
/// Security: Do not allow `.env` files to create or modify any variables
/// with names starting with `CODEX_`.
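
As a rough sketch of the guard described above (an illustration only; the real loader may use a dotenv crate and different error handling), the idea is to drop any `CODEX_`-prefixed keys before they reach the process environment:

```rust
use std::fs;
use std::path::Path;

const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";

/// Illustrative only: read simple KEY=VALUE lines from `~/.llmx/.env` and
/// return the entries that are allowed to be applied, skipping anything that
/// would create or modify a CODEX_-prefixed variable.
fn load_env_file(path: &Path) -> Vec<(String, String)> {
    let Ok(contents) = fs::read_to_string(path) else {
        return Vec::new();
    };
    contents
        .lines()
        .filter_map(|line| line.split_once('='))
        .map(|(k, v)| (k.trim().to_string(), v.trim().to_string()))
        .filter(|(k, _)| !k.starts_with(ILLEGAL_ENV_VAR_PREFIX))
        .collect()
}
```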

View File

@@ -36,7 +36,7 @@ use llmx_core::config::Config;
use llmx_core::config::ConfigOverrides;
use llmx_core::features::is_known_feature_key;
-/// Codex CLI
+/// LLMX CLI
///
/// If no subcommand is specified, options will be forwarded to the interactive CLI.
#[derive(Debug, Parser)]
@@ -46,10 +46,10 @@ use llmx_core::features::is_known_feature_key;
// If a subcommand is given, ignore requirements of the default args.
subcommand_negates_reqs = true,
// The executable is sometimes invoked via a platform-specific name like
-// `codex-x86_64-unknown-linux-musl`, but the help output should always use
-// the generic `codex` command name that users run.
-bin_name = "codex",
-override_usage = "codex [OPTIONS] [PROMPT]\n codex [OPTIONS] <COMMAND> [ARGS]"
+// `llmx-x86_64-unknown-linux-musl`, but the help output should always use
+// the generic `llmx` command name that users run.
+bin_name = "llmx",
+override_usage = "llmx [OPTIONS] [PROMPT]\n llmx [OPTIONS] <COMMAND> [ARGS]"
)]
struct MultitoolCli {
#[clap(flatten)]

View File

@@ -27,7 +27,7 @@ use llmx_rmcp_client::supports_oauth_login;
/// - `serve` — run the MCP server on stdio
/// - `list` — list configured servers (with `--json`)
/// - `get` — show a single server (with `--json`)
-/// - `add` — add a server launcher entry to `~/.codex/config.toml`
+/// - `add` — add a server launcher entry to `~/.llmx/config.toml`
/// - `remove` — delete a server entry
#[derive(Debug, clap::Parser)]
pub struct McpCli {
@@ -354,7 +354,7 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
if !config.features.enabled(Feature::RmcpClient) {
bail!(
"OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
"OAuth login is only supported when [features].rmcp_client is true in config.toml. See https://github.com/valknar/llmx/blob/main/docs/config.md#feature-flags for details."
);
}

View File

@@ -2,7 +2,7 @@ use std::ffi::OsStr;
/// WSL-specific path helpers used by the updater logic.
///
-/// See https://github.com/openai/codex/issues/6086.
+/// See https://github.com/valknar/llmx/issues/6086.
pub fn is_wsl() -> bool {
#[cfg(target_os = "linux")]
{
@@ -59,14 +59,14 @@ mod tests {
#[test]
fn win_to_wsl_basic() {
assert_eq!(
win_path_to_wsl(r"C:\Temp\codex.zip").as_deref(),
Some("/mnt/c/Temp/codex.zip")
win_path_to_wsl(r"C:\Temp\llmx.zip").as_deref(),
Some("/mnt/c/Temp/llmx.zip")
);
assert_eq!(
win_path_to_wsl("D:/Work/codex.tgz").as_deref(),
Some("/mnt/d/Work/codex.tgz")
win_path_to_wsl("D:/Work/llmx.tgz").as_deref(),
Some("/mnt/d/Work/llmx.tgz")
);
assert!(win_path_to_wsl("/home/user/codex").is_none());
assert!(win_path_to_wsl("/home/user/llmx").is_none());
}
#[test]
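
For context, the conversion those tests exercise can be sketched roughly as follows (a hypothetical helper, not the actual implementation in wsl_paths.rs):

```rust
/// Hypothetical sketch: map a Windows path like `C:\Temp\llmx.zip` to its WSL
/// mount point `/mnt/c/Temp/llmx.zip`; non-Windows paths return None.
fn win_path_to_wsl_sketch(path: &str) -> Option<String> {
    let mut chars = path.chars();
    let drive = chars.next()?;
    // Require a drive-letter prefix like `C:`; a POSIX path such as
    // `/home/user/llmx` is not a Windows path.
    if !drive.is_ascii_alphabetic() || chars.next()? != ':' {
        return None;
    }
    // Normalize separators and prefix with the lower-cased drive letter.
    let rest = chars.collect::<String>().replace('\\', "/");
    Some(format!("/mnt/{}{}", drive.to_ascii_lowercase(), rest))
}
```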

View File

@@ -18,7 +18,7 @@ use toml::Value;
#[derive(Parser, Debug, Default, Clone)]
pub struct CliConfigOverrides {
/// Override a configuration value that would otherwise be loaded from
-/// `~/.codex/config.toml`. Use a dotted path (`foo.bar.baz`) to override
+/// `~/.llmx/config.toml`. Use a dotted path (`foo.bar.baz`) to override
/// nested values. The `value` portion is parsed as TOML. If it fails to
/// parse as TOML, the raw string is used as a literal.
///
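
A minimal sketch of that parse-as-TOML-with-string-fallback behavior (an assumed shape; the real `CliConfigOverrides` parsing differs in detail):

```rust
use toml::Value;

/// Illustrative only: parse the value side of a `-c foo.bar=value` override as
/// TOML, falling back to a literal string when it is not valid TOML.
fn parse_override_value(raw: &str) -> Value {
    // Wrap in a dummy key so bare values like `42`, `true`, or `["a", "b"]`
    // form a complete TOML document.
    match format!("value = {raw}").parse::<Value>() {
        Ok(Value::Table(mut table)) => table
            .remove("value")
            .unwrap_or_else(|| Value::String(raw.to_string())),
        _ => Value::String(raw.to_string()),
    }
}
```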

View File

@@ -142,7 +142,7 @@ pub struct Config {
/// appends one extra argument containing a JSON payload describing the
/// event.
///
-/// Example `~/.codex/config.toml` snippet:
+/// Example `~/.llmx/config.toml` snippet:
///
/// ```toml
/// notify = ["notify-send", "Codex"]
@@ -178,7 +178,7 @@ pub struct Config {
/// Preferred store for MCP OAuth credentials.
/// keyring: Use an OS-specific keyring service.
/// Credentials stored in the keyring will only be readable by Codex unless the user explicitly grants access via OS-level keyring access.
-/// https://github.com/openai/codex/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
+/// https://github.com/valknar/llmx/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
/// file: CODEX_HOME/.credentials.json
/// This file will be readable to Codex and other applications running as the same user.
/// auto (default): keyring if available, otherwise file.
@@ -197,7 +197,7 @@ pub struct Config {
/// overridden by the `CODEX_HOME` environment variable).
pub codex_home: PathBuf,
-/// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
+/// Settings that govern if and what will be written to `~/.llmx/history.jsonl`.
pub history: History,
/// Optional URI-based file opener. If set, citations to files in the model
@@ -504,7 +504,7 @@ fn apply_toml_override(root: &mut TomlValue, path: &str, value: TomlValue) {
}
}
-/// Base config deserialized from ~/.codex/config.toml.
+/// Base config deserialized from ~/.llmx/config.toml.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
pub struct ConfigToml {
/// Optional override of model selection.
@@ -571,7 +571,7 @@ pub struct ConfigToml {
/// Preferred backend for storing MCP OAuth credentials.
/// keyring: Use an OS-specific keyring service.
-/// https://github.com/openai/codex/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
+/// https://github.com/valknar/llmx/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
/// file: Use a file in the Codex home directory.
/// auto (default): Use the OS-specific keyring service if available, otherwise use a file.
#[serde(default)]
@@ -594,7 +594,7 @@ pub struct ConfigToml {
#[serde(default)]
pub profiles: HashMap<String, ConfigProfile>,
-/// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
+/// Settings that govern if and what will be written to `~/.llmx/history.jsonl`.
#[serde(default)]
pub history: Option<History>,

View File

@@ -250,7 +250,7 @@ impl UriBasedFileOpener {
}
}
-/// Settings that govern if and what will be written to `~/.codex/history.jsonl`.
+/// Settings that govern if and what will be written to `~/.llmx/history.jsonl`.
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
pub struct History {
/// If true, history entries will not be written to disk.

View File

@@ -20,7 +20,7 @@ use std::sync::OnceLock;
/// However, future users of this should use this with caution as a result.
/// In addition, we want to be confident that this value is used for ALL clients and doing that requires a
/// lot of wiring and it's easy to miss code paths by doing so.
-/// See https://github.com/openai/codex/pull/3388/files for an example of what that would look like.
+/// See https://github.com/valknar/llmx/pull/3388/files for an example of what that would look like.
/// Finally, we want to make sure this is set for ALL mcp clients without needing to know a special env var
/// or having to set data that they already specified in the mcp initialize request somewhere else.
///

View File

@@ -552,7 +552,7 @@ impl Session {
None
} else {
Some(format!(
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/valknar/llmx/blob/main/docs/config.md#feature-flags for details."
))
};
post_session_configured_events.push(Event {

View File

@@ -1,6 +1,6 @@
//! Persistence layer for the global, append-only *message history* file.
//!
-//! The history is stored at `~/.codex/history.jsonl` with **one JSON object per
+//! The history is stored at `~/.llmx/history.jsonl` with **one JSON object per
//! line** so that it can be efficiently appended to and parsed with standard
//! JSON-Lines tooling. Each record has the following schema:
//!
@@ -75,7 +75,7 @@ pub(crate) async fn append_entry(
// TODO: check `text` for sensitive patterns
-// Resolve `~/.codex/history.jsonl` and ensure the parent directory exists.
+// Resolve `~/.llmx/history.jsonl` and ensure the parent directory exists.
let path = history_filepath(config);
if let Some(parent) = path.parent() {
tokio::fs::create_dir_all(parent).await?;
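
A minimal sketch of the append-only JSONL pattern described above (the field names here are placeholders, not the real history schema):

```rust
use std::io::Write;
use std::path::Path;

/// Illustrative only: append one JSON object per line so the file stays
/// compatible with standard JSON-Lines tooling (jq, fx, ...).
fn append_history_entry(path: &Path, text: &str) -> std::io::Result<()> {
    // Ensure the parent directory (e.g. ~/.llmx/) exists before appending.
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    let mut file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(path)?;
    writeln!(file, "{}", serde_json::json!({ "text": text }))?;
    Ok(())
}
```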

View File

@@ -2,7 +2,7 @@
//!
//! Providers can be defined in two places:
//! 1. Built-in defaults compiled into the binary so Codex works out-of-the-box.
-//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
+//! 2. User-defined entries inside `~/.llmx/config.toml` under the `model_providers`
//! key. These override or extend the defaults at runtime.
use crate::CodexAuth;

View File

@@ -150,7 +150,7 @@ pub(crate) async fn get_conversation(path: &Path) -> io::Result<String> {
/// Load conversation file paths from disk using directory traversal.
///
-/// Directory layout: `~/.codex/sessions/YYYY/MM/DD/rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl`
+/// Directory layout: `~/.llmx/sessions/YYYY/MM/DD/rollout-YYYY-MM-DDThh-mm-ss-<uuid>.jsonl`
/// Returned newest (latest) first.
async fn traverse_directories_for_paths(
root: PathBuf,

View File

@@ -40,8 +40,8 @@ use llmx_protocol::protocol::SessionSource;
/// Rollouts are recorded as JSONL and can be inspected with tools such as:
///
/// ```ignore
-/// $ jq -C . ~/.codex/sessions/rollout-2025-05-07T17-24-21-5973b6c0-94b8-487b-a530-2aeb6098ae0e.jsonl
-/// $ fx ~/.codex/sessions/rollout-2025-05-07T17-24-21-5973b6c0-94b8-487b-a530-2aeb6098ae0e.jsonl
+/// $ jq -C . ~/.llmx/sessions/rollout-2025-05-07T17-24-21-5973b6c0-94b8-487b-a530-2aeb6098ae0e.jsonl
+/// $ fx ~/.llmx/sessions/rollout-2025-05-07T17-24-21-5973b6c0-94b8-487b-a530-2aeb6098ae0e.jsonl
/// ```
#[derive(Clone)]
pub struct RolloutRecorder {
@@ -312,7 +312,7 @@ fn create_log_file(
config: &Config,
conversation_id: ConversationId,
) -> std::io::Result<LogFileInfo> {
-// Resolve ~/.codex/sessions/YYYY/MM/DD and create it if missing.
+// Resolve ~/.llmx/sessions/YYYY/MM/DD and create it if missing.
let timestamp = OffsetDateTime::now_local()
.map_err(|e| IoError::other(format!("failed to get local time: {e}")))?;
let mut dir = config.codex_home.clone();
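
A sketch of how that dated directory might be derived (assumed construction using the `time` crate seen in the hunk; the real `create_log_file` also builds the rollout file name and returns a richer struct):

```rust
use std::path::{Path, PathBuf};
use time::OffsetDateTime;

/// Illustrative only: resolve <llmx_home>/sessions/YYYY/MM/DD and create it.
fn sessions_dir_for_today(llmx_home: &Path) -> std::io::Result<PathBuf> {
    let now = OffsetDateTime::now_local()
        .map_err(|e| std::io::Error::other(format!("failed to get local time: {e}")))?;
    let dir = llmx_home
        .join("sessions")
        .join(format!("{:04}", now.year()))
        .join(format!("{:02}", u8::from(now.month())))
        .join(format!("{:02}", now.day()));
    std::fs::create_dir_all(&dir)?;
    Ok(dir)
}
```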

View File

@@ -256,7 +256,7 @@ pub(crate) struct ExecCommandContext {
pub(crate) tool_name: String,
pub(crate) otel_event_manager: OtelEventManager,
// TODO(abhisek-oai): Find a better way to track this.
-// https://github.com/openai/codex/pull/2471/files#r2470352242
+// https://github.com/valknar/llmx/pull/2471/files#r2470352242
pub(crate) is_user_shell_command: bool,
}

View File

@@ -42,7 +42,7 @@ async fn emits_deprecation_notice_for_legacy_feature_flag() -> anyhow::Result<()
assert_eq!(
details.as_deref(),
Some(
"Enable it with `--enable unified_exec` or `[features].unified_exec` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
"Enable it with `--enable unified_exec` or `[features].unified_exec` in config.toml. See https://github.com/valknar/llmx/blob/main/docs/config.md#feature-flags for details."
),
);

View File

@@ -26,7 +26,7 @@ use super::popup_consts::standard_popup_hint_line;
use super::textarea::TextArea;
use super::textarea::TextAreaState;
const BASE_ISSUE_URL: &str = "https://github.com/openai/codex/issues/new?template=2-bug-report.yml";
const BASE_ISSUE_URL: &str = "https://github.com/valknar/llmx/issues/new?template=2-bug-report.yml";
/// Minimal input overlay to collect an optional feedback note, then upload
/// both logs and rollout with classification + metadata.

View File

@@ -159,7 +159,7 @@ pub fn normalize_pasted_path(pasted: &str) -> Option<PathBuf> {
}
// TODO: We'll improve the implementation/unit tests over time, as appropriate.
-// Possibly use typed-path: https://github.com/openai/codex/pull/2567/commits/3cc92b78e0a1f94e857cf4674d3a9db918ed352e
+// Possibly use typed-path: https://github.com/valknar/llmx/pull/2567/commits/3cc92b78e0a1f94e857cf4674d3a9db918ed352e
//
// Detect unquoted Windows paths and bypass POSIX shlex which
// treats backslashes as escapes (e.g., C:\Users\Alice\file.png).
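
The detection described in that comment can be approximated with a heuristic like the following (an assumption for illustration, not the actual check used in the TUI):

```rust
/// Illustrative only: treat pasted input as an unquoted Windows path when it
/// starts with a drive letter followed by `:\` or `:/`, so it bypasses POSIX
/// shlex handling that would swallow the backslashes.
fn looks_like_windows_path(pasted: &str) -> bool {
    let bytes = pasted.as_bytes();
    bytes.len() >= 3
        && bytes[0].is_ascii_alphabetic()
        && bytes[1] == b':'
        && (bytes[2] == b'\\' || bytes[2] == b'/')
}
```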

View File

@@ -306,7 +306,7 @@ impl HistoryCell for UpdateAvailableHistoryCell {
} else {
line![
"See ",
"https://github.com/openai/codex".cyan().underlined(),
"https://github.com/valknar/llmx".cyan().underlined(),
" for installation options."
]
};
@@ -321,7 +321,7 @@ impl HistoryCell for UpdateAvailableHistoryCell {
update_instruction,
"",
"See full release notes:",
"https://github.com/openai/codex/releases/latest"
"https://github.com/valknar/llmx/releases/latest"
.cyan()
.underlined(),
];
@@ -1065,7 +1065,7 @@ pub(crate) fn empty_mcp_output() -> PlainHistoryCell {
" • No MCP servers configured.".italic().into(),
Line::from(vec![
" See the ".into(),
"\u{1b}]8;;https://github.com/openai/codex/blob/main/docs/config.md#mcp_servers\u{7}MCP docs\u{1b}]8;;\u{7}".underlined(),
"\u{1b}]8;;https://github.com/valknar/llmx/blob/main/docs/config.md#mcp_servers\u{7}MCP docs\u{1b}]8;;\u{7}".underlined(),
" to configure them.".into(),
])
.style(Style::default().add_modifier(Modifier::DIM)),
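
For reference, the escape sequences in those strings are OSC 8 terminal hyperlinks; a small helper that produces the same shape would look like this (a sketch, not a helper from the codebase):

```rust
/// Wrap `text` in an OSC 8 hyperlink so supporting terminals render it as a
/// clickable link to `url`.
fn osc8_link(url: &str, text: &str) -> String {
    format!("\u{1b}]8;;{url}\u{7}{text}\u{1b}]8;;\u{7}")
}

// Example: osc8_link("https://github.com/valknar/llmx", "LLMX docs")
```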

View File

@@ -291,7 +291,7 @@ impl AuthModeWidget {
" Decide how much autonomy you want to grant Codex".into(),
Line::from(vec![
" For more details see the ".into(),
"\u{1b}]8;;https://github.com/openai/codex\u{7}Codex docs\u{1b}]8;;\u{7}".underlined(),
"\u{1b}]8;;https://github.com/valknar/llmx\u{7}LLMX docs\u{1b}]8;;\u{7}".underlined(),
])
.dim(),
"".into(),

View File

@@ -204,7 +204,7 @@ impl WidgetRef for &UpdatePromptScreen {
column.push(
Line::from(vec![
"Release notes: ".dim(),
"https://github.com/openai/codex/releases/latest"
"https://github.com/valknar/llmx/releases/latest"
.dim()
.underlined(),
])

pnpm-lock.yaml (generated)
View File

@@ -17,6 +17,8 @@ importers:
specifier: ^3.5.3
version: 3.5.3
llmx-cli: {}
sdk/typescript:
devDependencies:
'@modelcontextprotocol/sdk':