Phase 7: Build & Release Pipeline
Updated the GitHub Actions workflows and build scripts:

GitHub Actions:
- Updated all workflow files to use llmx-rs instead of codex-rs
- Updated rust-release.yml to build the llmx binary
- Updated rust-ci.yml for continuous integration
- Updated sdk.yml for the TypeScript SDK workflows

Build Scripts:
- Renamed scripts/debug-codex.sh → scripts/debug-llmx.sh
- Updated the debug script to use LLMX_RS_DIR and build the llmx binary
- Updated scripts/stage_npm_packages.py with the new package names
- Updated llmx-cli/scripts/build_npm_package.py:
  - Package choices: "codex" → "llmx", "codex-responses-api-proxy" → "llmx-responses-api-proxy"
  - Default package: codex → llmx
  - Updated component dependencies
- Updated llmx-cli/scripts/install_native_deps.py:
  - Binary component names: codex → llmx
  - Updated artifact prefixes and destination directories
  - Variable names: CODEX_CLI_ROOT → LLMX_CLI_ROOT
  - Default components: ["codex", "rg"] → ["llmx", "rg"]

GitHub URLs:
- Updated all references: github.com/openai/codex → github.com/valknar/llmx

Files changed: 8 (build scripts and workflows)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
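As a quick local smoke test of the renamed tooling, something along these lines should work from the repository root (a minimal sketch; it assumes the llmx-rs workspace and scripts/debug-llmx.sh described above, and that the llmx CLI accepts --version):

```bash
# Build the renamed CLI binary from the Rust workspace (formerly codex-rs).
cargo build --release --bin llmx --manifest-path llmx-rs/Cargo.toml

# Or go through the debug wrapper, which now cd's into llmx-rs and runs the llmx binary.
./scripts/debug-llmx.sh --version
```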
.github/workflows/rust-ci.yml (20 changes)
@@ -33,13 +33,13 @@ jobs:
           mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)
         else
           # On push / manual runs, default to running everything
-          files=("codex-rs/force" ".github/force")
+          files=("llmx-rs/force" ".github/force")
         fi

         codex=false
         workflows=false
         for f in "${files[@]}"; do
-          [[ $f == codex-rs/* ]] && codex=true
+          [[ $f == llmx-rs/* ]] && codex=true
           [[ $f == .github/* ]] && workflows=true
         done

@@ -54,7 +54,7 @@ jobs:
     if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
     defaults:
       run:
-        working-directory: codex-rs
+        working-directory: llmx-rs
     steps:
       - uses: actions/checkout@v5
       - uses: dtolnay/rust-toolchain@1.90
@@ -72,7 +72,7 @@ jobs:
     if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
     defaults:
       run:
-        working-directory: codex-rs
+        working-directory: llmx-rs
     steps:
       - uses: actions/checkout@v5
       - uses: dtolnay/rust-toolchain@1.90
@@ -93,7 +93,7 @@ jobs:
     if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
     defaults:
       run:
-        working-directory: codex-rs
+        working-directory: llmx-rs
     env:
       # Speed up repeated builds across CI runs by caching compiled objects.
       RUSTC_WRAPPER: sccache
@@ -164,7 +164,7 @@ jobs:
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('llmx-rs/rust-toolchain.toml') }}
           restore-keys: |
             cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

@@ -271,7 +271,7 @@ jobs:
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('llmx-rs/rust-toolchain.toml') }}

       - name: Save sccache cache (fallback)
         if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
@@ -324,7 +324,7 @@ jobs:
     if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
     defaults:
       run:
-        working-directory: codex-rs
+        working-directory: llmx-rs
     env:
       RUSTC_WRAPPER: sccache
       CARGO_INCREMENTAL: "0"
@@ -365,7 +365,7 @@ jobs:
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('llmx-rs/rust-toolchain.toml') }}
           restore-keys: |
             cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

@@ -421,7 +421,7 @@ jobs:
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
-          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
+          key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('llmx-rs/rust-toolchain.toml') }}

       - name: Save sccache cache (fallback)
         if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
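The change-detection filter patched above can be sanity-checked outside CI; a minimal sketch, assuming BASE_SHA stands in for the PR base commit (only the glob patterns come from the workflow):

```bash
#!/usr/bin/env bash
# Hypothetical local replay of the rust-ci change filter after the rename.
BASE_SHA=origin/main  # assumed stand-in for the PR base commit
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)

codex=false
workflows=false
for f in "${files[@]}"; do
  # Paths under the renamed Rust workspace now flip the "codex" output.
  [[ $f == llmx-rs/* ]] && codex=true
  [[ $f == .github/* ]] && workflows=true
done
echo "codex=$codex workflows=$workflows"
```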
.github/workflows/rust-release.yml (10 changes)
@@ -1,4 +1,4 @@
-# Release workflow for codex-rs.
+# Release workflow for llmx-rs.
 # To release, follow a workflow like:
 # ```
 # git tag -a rust-v0.1.0 -m "Release 0.1.0"
@@ -35,7 +35,7 @@ jobs:

           # 2. Extract versions
           tag_ver="${GITHUB_REF_NAME#rust-v}"
-          cargo_ver="$(grep -m1 '^version' codex-rs/Cargo.toml \
+          cargo_ver="$(grep -m1 '^version' llmx-rs/Cargo.toml \
                        | sed -E 's/version *= *"([^"]+)".*/\1/')"

           # 3. Compare
@@ -52,7 +52,7 @@ jobs:
     timeout-minutes: 30
     defaults:
       run:
-        working-directory: codex-rs
+        working-directory: llmx-rs

     strategy:
       fail-fast: false
@@ -88,7 +88,7 @@ jobs:
             ~/.cargo/registry/index/
             ~/.cargo/registry/cache/
             ~/.cargo/git/db/
-            ${{ github.workspace }}/codex-rs/target/
+            ${{ github.workspace }}/llmx-rs/target/
           key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

       - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
@@ -369,7 +369,7 @@ jobs:
           # Upload the per-binary .zst files as well as the new .tar.gz
           # equivalents we generated in the previous step.
           path: |
-            codex-rs/dist/${{ matrix.target }}/*
+            llmx-rs/dist/${{ matrix.target }}/*

   release:
     needs: build
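The tag-versus-Cargo.toml guard patched above can also be run locally before pushing a release tag; a minimal sketch, assuming the repository root and the rust-vX.Y.Z tag convention shown in the workflow header:

```bash
#!/usr/bin/env bash
# Hypothetical local version of the release workflow's consistency check.
tag="rust-v0.1.0"  # assumption: the tag you are about to create
tag_ver="${tag#rust-v}"
cargo_ver="$(grep -m1 '^version' llmx-rs/Cargo.toml \
  | sed -E 's/version *= *"([^"]+)".*/\1/')"

if [[ "$tag_ver" == "$cargo_ver" ]]; then
  echo "OK: $tag matches llmx-rs/Cargo.toml version $cargo_ver"
else
  echo "Mismatch: tag says $tag_ver, Cargo.toml says $cargo_ver" >&2
  exit 1
fi
```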
.github/workflows/sdk.yml (2 changes)
@@ -28,7 +28,7 @@ jobs:

       - name: build codex
         run: cargo build --bin codex
-        working-directory: codex-rs
+        working-directory: llmx-rs

       - name: Install dependencies
         run: pnpm install --frozen-lockfile
llmx-cli/scripts/build_npm_package.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-"""Stage and optionally package the @openai/codex npm module."""
+"""Stage and optionally package the @valknar/llmx npm module."""

 import argparse
 import json
@@ -12,17 +12,17 @@ from pathlib import Path
 SCRIPT_DIR = Path(__file__).resolve().parent
 CODEX_CLI_ROOT = SCRIPT_DIR.parent
 REPO_ROOT = CODEX_CLI_ROOT.parent
-RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
+RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "llmx-rs" / "responses-api-proxy" / "npm"
 CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript"

 PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
-    "codex": ["codex", "rg"],
-    "codex-responses-api-proxy": ["codex-responses-api-proxy"],
-    "codex-sdk": ["codex"],
+    "llmx": ["codex", "rg"],
+    "llmx-responses-api-proxy": ["llmx-responses-api-proxy"],
+    "llmx-sdk": ["llmx"],
 }
 COMPONENT_DEST_DIR: dict[str, str] = {
-    "codex": "codex",
-    "codex-responses-api-proxy": "codex-responses-api-proxy",
+    "llmx": "codex",
+    "llmx-responses-api-proxy": "codex-responses-api-proxy",
     "rg": "path",
 }

@@ -31,8 +31,8 @@ def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
     parser.add_argument(
         "--package",
-        choices=("codex", "codex-responses-api-proxy", "codex-sdk"),
-        default="codex",
+        choices=("llmx", "llmx-responses-api-proxy", "llmx-sdk"),
+        default="llmx",
         help="Which npm package to stage (default: codex).",
     )
     parser.add_argument(
@@ -107,14 +107,14 @@ def main() -> int:

     if release_version:
         staging_dir_str = str(staging_dir)
-        if package == "codex":
+        if package == "llmx":
             print(
                 f"Staged version {version} for release in {staging_dir_str}\n\n"
                 "Verify the CLI:\n"
                 f" node {staging_dir_str}/bin/codex.js --version\n"
                 f" node {staging_dir_str}/bin/codex.js --help\n\n"
             )
-        elif package == "codex-responses-api-proxy":
+        elif package == "llmx-responses-api-proxy":
             print(
                 f"Staged version {version} for release in {staging_dir_str}\n\n"
                 "Verify the responses API proxy:\n"
@@ -155,7 +155,7 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:


 def stage_sources(staging_dir: Path, version: str, package: str) -> None:
-    if package == "codex":
+    if package == "llmx":
         bin_dir = staging_dir / "bin"
         bin_dir.mkdir(parents=True, exist_ok=True)
         shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
@@ -168,7 +168,7 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         shutil.copy2(readme_src, staging_dir / "README.md")

         package_json_path = CODEX_CLI_ROOT / "package.json"
-    elif package == "codex-responses-api-proxy":
+    elif package == "llmx-responses-api-proxy":
         bin_dir = staging_dir / "bin"
         bin_dir.mkdir(parents=True, exist_ok=True)
         launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
@@ -179,7 +179,7 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         shutil.copy2(readme_src, staging_dir / "README.md")

         package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
-    elif package == "codex-sdk":
+    elif package == "llmx-sdk":
         package_json_path = CODEX_SDK_ROOT / "package.json"
         stage_codex_sdk_sources(staging_dir)
     else:
@@ -189,7 +189,7 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         package_json = json.load(fh)
         package_json["version"] = version

-    if package == "codex-sdk":
+    if package == "llmx-sdk":
         scripts = package_json.get("scripts")
         if isinstance(scripts, dict):
             scripts.pop("prepare", None)
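For reference, the renamed --package choices would be exercised roughly like this (a sketch only; flags beyond --package, such as a release version or an explicit staging directory, may be required and are omitted here):

```bash
# Stage each of the renamed npm packages; the default is now "llmx".
python3 llmx-cli/scripts/build_npm_package.py --package llmx
python3 llmx-cli/scripts/build_npm_package.py --package llmx-responses-api-proxy
python3 llmx-cli/scripts/build_npm_package.py --package llmx-sdk
```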
llmx-cli/scripts/install_native_deps.py
@@ -17,10 +17,10 @@ from urllib.parse import urlparse
 from urllib.request import urlopen

 SCRIPT_DIR = Path(__file__).resolve().parent
-CODEX_CLI_ROOT = SCRIPT_DIR.parent
-DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0
+LLMX_CLI_ROOT = SCRIPT_DIR.parent
+DEFAULT_WORKFLOW_URL = "https://github.com/valknar/llmx/actions/runs/17952349351" # rust-v0.40.0
 VENDOR_DIR_NAME = "vendor"
-RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
+RG_MANIFEST = LLMX_CLI_ROOT / "bin" / "rg"
 BINARY_TARGETS = (
     "x86_64-unknown-linux-musl",
     "aarch64-unknown-linux-musl",
@@ -39,15 +39,15 @@ class BinaryComponent:


 BINARY_COMPONENTS = {
-    "codex": BinaryComponent(
-        artifact_prefix="codex",
-        dest_dir="codex",
-        binary_basename="codex",
+    "llmx": BinaryComponent(
+        artifact_prefix="llmx",
+        dest_dir="llmx",
+        binary_basename="llmx",
     ),
-    "codex-responses-api-proxy": BinaryComponent(
-        artifact_prefix="codex-responses-api-proxy",
-        dest_dir="codex-responses-api-proxy",
-        binary_basename="codex-responses-api-proxy",
+    "llmx-responses-api-proxy": BinaryComponent(
+        artifact_prefix="llmx-responses-api-proxy",
+        dest_dir="llmx-responses-api-proxy",
+        binary_basename="llmx-responses-api-proxy",
     ),
 }

@@ -97,11 +97,11 @@ def parse_args() -> argparse.Namespace:
 def main() -> int:
     args = parse_args()

-    codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
+    codex_cli_root = (args.root or LLMX_CLI_ROOT).resolve()
     vendor_dir = codex_cli_root / VENDOR_DIR_NAME
     vendor_dir.mkdir(parents=True, exist_ok=True)

-    components = args.components or ["codex", "rg"]
+    components = args.components or ["llmx", "rg"]

     workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
     if not workflow_url:
@@ -110,7 +110,7 @@ def main() -> int:
     workflow_id = workflow_url.rstrip("/").split("/")[-1]
     print(f"Downloading native artifacts from workflow {workflow_id}...")

-    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
+    with tempfile.TemporaryDirectory(prefix="llmx-native-artifacts-") as artifacts_dir_str:
         artifacts_dir = Path(artifacts_dir_str)
         _download_artifacts(workflow_id, artifacts_dir)
         install_binary_components(
@@ -197,7 +197,7 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
         "--dir",
         str(dest_dir),
         "--repo",
-        "openai/codex",
+        "valknar/llmx",
         workflow_id,
     ]
     subprocess.check_call(cmd)
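With no arguments, the updated script should now fall back to the renamed defaults: components ["llmx", "rg"], artifacts pulled from the valknar/llmx run in DEFAULT_WORKFLOW_URL, and binaries vendored under llmx-cli/vendor. A sketch of that invocation (it downloads workflow artifacts, so network access and GitHub credentials are presumably needed):

```bash
# Fetch the default native components (llmx and rg) from the default workflow run.
python3 llmx-cli/scripts/install_native_deps.py
```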
scripts/debug-codex.sh (deleted)
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-# Set "chatgpt.cliExecutable": "/Users/<USERNAME>/code/codex/scripts/debug-codex.sh" in VSCode settings to always get the
-# latest codex-rs binary when debugging Codex Extension.
-
-
-set -euo pipefail
-
-CODEX_RS_DIR=$(realpath "$(dirname "$0")/../codex-rs")
-(cd "$CODEX_RS_DIR" && cargo run --quiet --bin codex -- "$@")
scripts/debug-llmx.sh (new executable file, 10 additions)
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+# Set "chatgpt.cliExecutable": "/Users/<USERNAME>/code/llmx/scripts/debug-llmx.sh" in VSCode settings to always get the
+# latest llmx-rs binary when debugging LLMX Extension.
+
+
+set -euo pipefail
+
+LLMX_RS_DIR=$(realpath "$(dirname "$0")/../llmx-rs")
+(cd "$LLMX_RS_DIR" && cargo run --quiet --bin llmx -- "$@")
scripts/stage_npm_packages.py
@@ -14,12 +14,12 @@ from pathlib import Path


 REPO_ROOT = Path(__file__).resolve().parent.parent
-BUILD_SCRIPT = REPO_ROOT / "codex-cli" / "scripts" / "build_npm_package.py"
-INSTALL_NATIVE_DEPS = REPO_ROOT / "codex-cli" / "scripts" / "install_native_deps.py"
+BUILD_SCRIPT = REPO_ROOT / "llmx-cli" / "scripts" / "build_npm_package.py"
+INSTALL_NATIVE_DEPS = REPO_ROOT / "llmx-cli" / "scripts" / "install_native_deps.py"
 WORKFLOW_NAME = ".github/workflows/rust-release.yml"
-GITHUB_REPO = "openai/codex"
+GITHUB_REPO = "valknar/llmx"

-_SPEC = importlib.util.spec_from_file_location("codex_build_npm_package", BUILD_SCRIPT)
+_SPEC = importlib.util.spec_from_file_location("llmx_build_npm_package", BUILD_SCRIPT)
 if _SPEC is None or _SPEC.loader is None:
     raise RuntimeError(f"Unable to load module from {BUILD_SCRIPT}")
 _BUILD_MODULE = importlib.util.module_from_spec(_SPEC)