diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml index 345fd091..05bbb9a4 100644 --- a/.github/workflows/rust-release.yml +++ b/.github/workflows/rust-release.yml @@ -218,17 +218,30 @@ jobs: # build_npm_package.py requires DotSlash when staging releases. - uses: facebook/install-dotslash@v2 - - name: Stage npm package + - name: Stage codex CLI npm package env: GH_TOKEN: ${{ github.token }} run: | set -euo pipefail TMP_DIR="${RUNNER_TEMP}/npm-stage" ./codex-cli/scripts/build_npm_package.py \ + --package codex \ --release-version "${{ steps.release_name.outputs.name }}" \ --staging-dir "${TMP_DIR}" \ --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz" + - name: Stage responses API proxy npm package + env: + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + TMP_DIR="${RUNNER_TEMP}/npm-stage-responses" + ./codex-cli/scripts/build_npm_package.py \ + --package codex-responses-api-proxy \ + --release-version "${{ steps.release_name.outputs.name }}" \ + --staging-dir "${TMP_DIR}" \ + --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz" + - name: Create GitHub Release uses: softprops/action-gh-release@v2 with: @@ -271,7 +284,7 @@ jobs: - name: Update npm run: npm install -g npm@latest - - name: Download npm tarball from release + - name: Download npm tarballs from release env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | @@ -283,6 +296,10 @@ jobs: --repo "${GITHUB_REPOSITORY}" \ --pattern "codex-npm-${version}.tgz" \ --dir dist/npm + gh release download "$tag" \ + --repo "${GITHUB_REPOSITORY}" \ + --pattern "codex-responses-api-proxy-npm-${version}.tgz" \ + --dir dist/npm # No NODE_AUTH_TOKEN needed because we use OIDC. 
- name: Publish to npm @@ -296,7 +313,14 @@ jobs: tag_args+=(--tag "${NPM_TAG}") fi - npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}" + tarballs=( + "codex-npm-${VERSION}.tgz" + "codex-responses-api-proxy-npm-${VERSION}.tgz" + ) + + for tarball in "${tarballs[@]}"; do + npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}" + done update-branch: name: Update latest-alpha-cli branch diff --git a/codex-cli/README.md b/codex-cli/README.md index e988b384..f3414f1c 100644 --- a/codex-cli/README.md +++ b/codex-cli/README.md @@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS: | Requirement | Details | | --------------------------- | --------------------------------------------------------------- | | Operating systems | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** | -| Node.js | **22 or newer** (LTS recommended) | +| Node.js | **16 or newer** (Node 20 LTS recommended) | | Git (optional, recommended) | 2.23+ for built-in PR helpers | | RAM | 4-GB minimum (8-GB recommended) | @@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
Does it work on Windows? -Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22. +Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
diff --git a/codex-cli/bin/codex.js b/codex-cli/bin/codex.js index 3621f48c..17dd98a8 100755 --- a/codex-cli/bin/codex.js +++ b/codex-cli/bin/codex.js @@ -1,6 +1,7 @@ #!/usr/bin/env node // Unified entry point for the Codex CLI. +import { spawn } from "node:child_process"; import { existsSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; @@ -68,7 +69,6 @@ const binaryPath = path.join(archRoot, "codex", codexBinaryName); // executing. This allows us to forward those signals to the child process // and guarantees that when either the child terminates or the parent // receives a fatal signal, both processes exit in a predictable manner. -const { spawn } = await import("child_process"); function getUpdatedPath(newDirs) { const pathSep = process.platform === "win32" ? ";" : ":"; diff --git a/codex-cli/package-lock.json b/codex-cli/package-lock.json index a28bb9cd..58ee8463 100644 --- a/codex-cli/package-lock.json +++ b/codex-cli/package-lock.json @@ -11,7 +11,7 @@ "codex": "bin/codex.js" }, "engines": { - "node": ">=20" + "node": ">=16" } } } diff --git a/codex-cli/package.json b/codex-cli/package.json index 03f234d6..b83309e4 100644 --- a/codex-cli/package.json +++ b/codex-cli/package.json @@ -7,7 +7,7 @@ }, "type": "module", "engines": { - "node": ">=20" + "node": ">=16" }, "files": [ "bin", diff --git a/codex-cli/scripts/build_npm_package.py b/codex-cli/scripts/build_npm_package.py index bedb1c24..2ae8a96e 100755 --- a/codex-cli/scripts/build_npm_package.py +++ b/codex-cli/scripts/build_npm_package.py @@ -13,6 +13,7 @@ from pathlib import Path SCRIPT_DIR = Path(__file__).resolve().parent CODEX_CLI_ROOT = SCRIPT_DIR.parent REPO_ROOT = CODEX_CLI_ROOT.parent +RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm" GITHUB_REPO = "openai/codex" # The docs are not clear on what the expected value/format of @@ -23,6 +24,12 @@ WORKFLOW_NAME = ".github/workflows/rust-release.yml" def parse_args() -> 
argparse.Namespace: parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.") + parser.add_argument( + "--package", + choices=("codex", "codex-responses-api-proxy"), + default="codex", + help="Which npm package to stage (default: codex).", + ) parser.add_argument( "--version", help="Version number to write to package.json inside the staged package.", @@ -63,6 +70,7 @@ def parse_args() -> argparse.Namespace: def main() -> int: args = parse_args() + package = args.package version = args.version release_version = args.release_version if release_version: @@ -76,7 +84,7 @@ def main() -> int: staging_dir, created_temp = prepare_staging_dir(args.staging_dir) try: - stage_sources(staging_dir, version) + stage_sources(staging_dir, version, package) workflow_url = args.workflow_url resolved_head_sha: str | None = None @@ -100,16 +108,23 @@ def main() -> int: if not workflow_url: raise RuntimeError("Unable to determine workflow URL for native binaries.") - install_native_binaries(staging_dir, workflow_url) + install_native_binaries(staging_dir, workflow_url, package) if release_version: staging_dir_str = str(staging_dir) - print( - f"Staged version {version} for release in {staging_dir_str}\n\n" - "Verify the CLI:\n" - f" node {staging_dir_str}/bin/codex.js --version\n" - f" node {staging_dir_str}/bin/codex.js --help\n\n" - ) + if package == "codex": + print( + f"Staged version {version} for release in {staging_dir_str}\n\n" + "Verify the CLI:\n" + f" node {staging_dir_str}/bin/codex.js --version\n" + f" node {staging_dir_str}/bin/codex.js --help\n\n" + ) + else: + print( + f"Staged version {version} for release in {staging_dir_str}\n\n" + "Verify the responses API proxy:\n" + f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n" + ) else: print(f"Staged package in {staging_dir}") @@ -136,20 +151,34 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]: return temp_dir, True -def stage_sources(staging_dir: 
Path, version: str) -> None: +def stage_sources(staging_dir: Path, version: str, package: str) -> None: bin_dir = staging_dir / "bin" bin_dir.mkdir(parents=True, exist_ok=True) - shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js") - rg_manifest = CODEX_CLI_ROOT / "bin" / "rg" - if rg_manifest.exists(): - shutil.copy2(rg_manifest, bin_dir / "rg") + if package == "codex": + shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js") + rg_manifest = CODEX_CLI_ROOT / "bin" / "rg" + if rg_manifest.exists(): + shutil.copy2(rg_manifest, bin_dir / "rg") - readme_src = REPO_ROOT / "README.md" - if readme_src.exists(): - shutil.copy2(readme_src, staging_dir / "README.md") + readme_src = REPO_ROOT / "README.md" + if readme_src.exists(): + shutil.copy2(readme_src, staging_dir / "README.md") - with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh: + package_json_path = CODEX_CLI_ROOT / "package.json" + elif package == "codex-responses-api-proxy": + launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js" + shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js") + + readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md" + if readme_src.exists(): + shutil.copy2(readme_src, staging_dir / "README.md") + + package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json" + else: + raise RuntimeError(f"Unknown package '{package}'.") + + with open(package_json_path, "r", encoding="utf-8") as fh: package_json = json.load(fh) package_json["version"] = version @@ -158,8 +187,20 @@ def stage_sources(staging_dir: Path, version: str) -> None: out.write("\n") -def install_native_binaries(staging_dir: Path, workflow_url: str) -> None: - cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url, str(staging_dir)] +def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None: + package_components = { + "codex": ["codex", "rg"], + "codex-responses-api-proxy": 
["codex-responses-api-proxy"], + } + + components = package_components.get(package) + if components is None: + raise RuntimeError(f"Unknown package '{package}'.") + + cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url] + for component in components: + cmd.extend(["--component", component]) + cmd.append(str(staging_dir)) subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT) diff --git a/codex-cli/scripts/install_native_deps.py b/codex-cli/scripts/install_native_deps.py index 7fbb4439..8d3909c9 100755 --- a/codex-cli/scripts/install_native_deps.py +++ b/codex-cli/scripts/install_native_deps.py @@ -9,6 +9,7 @@ import subprocess import tarfile import tempfile import zipfile +from dataclasses import dataclass from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path from typing import Iterable, Sequence @@ -20,7 +21,7 @@ CODEX_CLI_ROOT = SCRIPT_DIR.parent DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0 VENDOR_DIR_NAME = "vendor" RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg" -CODEX_TARGETS = ( +BINARY_TARGETS = ( "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-apple-darwin", @@ -29,6 +30,27 @@ CODEX_TARGETS = ( "aarch64-pc-windows-msvc", ) + +@dataclass(frozen=True) +class BinaryComponent: + artifact_prefix: str # matches the artifact filename prefix (e.g. 
codex-.zst) + dest_dir: str # directory under vendor// where the binary is installed + binary_basename: str # executable name inside dest_dir (before optional .exe) + + +BINARY_COMPONENTS = { + "codex": BinaryComponent( + artifact_prefix="codex", + dest_dir="codex", + binary_basename="codex", + ), + "codex-responses-api-proxy": BinaryComponent( + artifact_prefix="codex-responses-api-proxy", + dest_dir="codex-responses-api-proxy", + binary_basename="codex-responses-api-proxy", + ), +} + RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [ ("x86_64-unknown-linux-musl", "linux-x86_64"), ("aarch64-unknown-linux-musl", "linux-aarch64"), @@ -50,6 +72,16 @@ def parse_args() -> argparse.Namespace: "known good run when omitted." ), ) + parser.add_argument( + "--component", + dest="components", + action="append", + choices=tuple(list(BINARY_COMPONENTS) + ["rg"]), + help=( + "Limit installation to the specified components." + " May be repeated. Defaults to 'codex' and 'rg'." + ), + ) parser.add_argument( "root", nargs="?", @@ -69,18 +101,28 @@ def main() -> int: vendor_dir = codex_cli_root / VENDOR_DIR_NAME vendor_dir.mkdir(parents=True, exist_ok=True) + components = args.components or ["codex", "rg"] + workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip() if not workflow_url: workflow_url = DEFAULT_WORKFLOW_URL workflow_id = workflow_url.rstrip("/").split("/")[-1] + print(f"Downloading native artifacts from workflow {workflow_id}...") with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str: artifacts_dir = Path(artifacts_dir_str) _download_artifacts(workflow_id, artifacts_dir) - install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS) + install_binary_components( + artifacts_dir, + vendor_dir, + BINARY_TARGETS, + [name for name in components if name in BINARY_COMPONENTS], + ) - fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST) + if "rg" in components: + print("Fetching ripgrep binaries...") + 
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST) print(f"Installed native dependencies into {vendor_dir}") return 0 @@ -124,6 +166,8 @@ def fetch_rg( results: dict[str, Path] = {} max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1))) + print("Installing ripgrep binaries for targets: " + ", ".join(targets)) + with ThreadPoolExecutor(max_workers=max_workers) as executor: future_map = { executor.submit( @@ -140,6 +184,7 @@ def fetch_rg( for future in as_completed(future_map): target = future_map[future] results[target] = future.result() + print(f" installed ripgrep for {target}") return [results[target] for target in targets] @@ -158,40 +203,60 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None: subprocess.check_call(cmd) -def install_codex_binaries( - artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str] -) -> list[Path]: +def install_binary_components( + artifacts_dir: Path, + vendor_dir: Path, + targets: Iterable[str], + component_names: Sequence[str], +) -> None: + selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS] + if not selected_components: + return + targets = list(targets) if not targets: - return [] + return - results: dict[str, Path] = {} - max_workers = min(len(targets), max(1, (os.cpu_count() or 1))) - - with ThreadPoolExecutor(max_workers=max_workers) as executor: - future_map = { - executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target - for target in targets - } - - for future in as_completed(future_map): - target = future_map[future] - results[target] = future.result() - - return [results[target] for target in targets] + for component in selected_components: + print( + f"Installing {component.binary_basename} binaries for targets: " + + ", ".join(targets) + ) + max_workers = min(len(targets), max(1, (os.cpu_count() or 1))) + with ThreadPoolExecutor(max_workers=max_workers) as executor: + futures = { + 
executor.submit( + _install_single_binary, + artifacts_dir, + vendor_dir, + target, + component, + ): target + for target in targets + } + for future in as_completed(futures): + installed_path = future.result() + print(f" installed {installed_path}") -def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path: +def _install_single_binary( + artifacts_dir: Path, + vendor_dir: Path, + target: str, + component: BinaryComponent, +) -> Path: artifact_subdir = artifacts_dir / target - archive_name = _archive_name_for_target(target) + archive_name = _archive_name_for_target(component.artifact_prefix, target) archive_path = artifact_subdir / archive_name if not archive_path.exists(): raise FileNotFoundError(f"Expected artifact not found: {archive_path}") - dest_dir = vendor_dir / target / "codex" + dest_dir = vendor_dir / target / component.dest_dir dest_dir.mkdir(parents=True, exist_ok=True) - binary_name = "codex.exe" if "windows" in target else "codex" + binary_name = ( + f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename + ) dest = dest_dir / binary_name dest.unlink(missing_ok=True) extract_archive(archive_path, "zst", None, dest) @@ -200,10 +265,10 @@ def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: return dest -def _archive_name_for_target(target: str) -> str: +def _archive_name_for_target(artifact_prefix: str, target: str) -> str: if "windows" in target: - return f"codex-{target}.exe.zst" - return f"codex-{target}.zst" + return f"{artifact_prefix}-{target}.exe.zst" + return f"{artifact_prefix}-{target}.zst" def _fetch_single_rg( diff --git a/codex-rs/process-hardening/README.md b/codex-rs/process-hardening/README.md new file mode 100644 index 00000000..66a8060a --- /dev/null +++ b/codex-rs/process-hardening/README.md @@ -0,0 +1,7 @@ +# codex-process-hardening + +This crate provides `pre_main_hardening()`, which is designed to be called pre-`main()` (using 
`#[ctor::ctor]`) to perform various process hardening steps, such as + +- disabling core dumps +- disabling ptrace attach on Linux and macOS +- removing dangerous environment variables such as `LD_PRELOAD` and `DYLD_*` diff --git a/codex-rs/responses-api-proxy/README.md b/codex-rs/responses-api-proxy/README.md index 4f5304a5..4b74fe10 100644 --- a/codex-rs/responses-api-proxy/README.md +++ b/codex-rs/responses-api-proxy/README.md @@ -4,12 +4,12 @@ A strict HTTP proxy that only forwards `POST` requests to `/v1/responses` to the ## Expected Usage -**IMPORTANT:** This is designed to be used with `CODEX_SECURE_MODE=1` so that an unprivileged user cannot inspect or tamper with this process. Though if `--http-shutdown` is specified, an unprivileged user _can_ shutdown the server. +**IMPORTANT:** `codex-responses-api-proxy` is designed to be run by a privileged user with access to `OPENAI_API_KEY` so that an unprivileged user cannot inspect or tamper with the process. Though if `--http-shutdown` is specified, an unprivileged user _can_ make a `GET` request to `/shutdown` to shutdown the server, as an unprivileged user could not send `SIGTERM` to kill the process.
-A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server: +A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server, as `codex-responses-api-proxy` reads the auth token from `stdin`: ```shell -printenv OPENAI_API_KEY | CODEX_SECURE_MODE=1 codex responses-api-proxy --http-shutdown --server-info /tmp/server-info.json +printenv OPENAI_API_KEY | codex-responses-api-proxy --http-shutdown --server-info /tmp/server-info.json ``` A non-privileged user would then run Codex as follows, specifying the `model_provider` dynamically: @@ -22,7 +22,7 @@ codex exec -c "model_providers.openai-proxy={ name = 'OpenAI Proxy', base_url = 'Your prompt here' ``` -When the unprivileged user was finished, they could shutdown the server using `curl` (since `kill -9` is not an option): +When the unprivileged user was finished, they could shutdown the server using `curl` (since `kill -SIGTERM` is not an option): ```shell curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown" @@ -30,17 +30,17 @@ curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown" ## Behavior -- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex responses-api-proxy`). +- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex-responses-api-proxy`). - Formats the header value as `Bearer ` and attempts to `mlock(2)` the memory holding that header so it is not swapped to disk. - Listens on the provided port or an ephemeral port if `--port` is not specified. - Accepts exactly `POST /v1/responses` (no query string). The request body is forwarded to `https://api.openai.com/v1/responses` with `Authorization: Bearer ` set. All original request headers (except any incoming `Authorization`) are forwarded upstream. 
For other requests, it responds with `403`. - Optionally writes a single-line JSON file with server info, currently `{ "port": }`. -- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., root) to start the proxy and another unprivileged user on the host to shut it down. +- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., `root`) to start the proxy and another unprivileged user on the host to shut it down. ## CLI ``` -responses-api-proxy [--port ] [--server-info ] [--http-shutdown] +codex-responses-api-proxy [--port ] [--server-info ] [--http-shutdown] ``` - `--port `: Port to bind on `127.0.0.1`. If omitted, an ephemeral port is chosen. @@ -51,3 +51,19 @@ responses-api-proxy [--port ] [--server-info ] [--http-shutdown] - Only `POST /v1/responses` is permitted. No query strings are allowed. - All request headers are forwarded to the upstream call (aside from overriding `Authorization`). Response status and content-type are mirrored from upstream. + +## Hardening Details + +Care is taken to restrict access/copying to the value of `OPENAI_API_KEY` retained in memory: + +- We leverage [`codex_process_hardening`](https://github.com/openai/codex/blob/main/codex-rs/process-hardening/README.md) so `codex-responses-api-proxy` is run with standard process-hardening techniques. +- At startup, we allocate a `1024` byte buffer on the stack and write `"Bearer "` as the first `7` bytes. +- We then read from `stdin`, copying the contents into the buffer after `"Bearer "`. +- After verifying the key matches `/^[a-zA-Z0-9_-]+$/` (and does not exceed the buffer), we create a `String` from that buffer (so the data is now on the heap). +- We zero out the stack-allocated buffer using https://crates.io/crates/zeroize so it is not optimized away by the compiler. 
+- We invoke `.leak()` on the `String` so we can treat its contents as a `&'static str`, as it will live for the rest of the process. +- On UNIX, we `mlock(2)` the memory backing the `&'static str`. +- When using the `&'static str` when building an HTTP request, we use `HeaderValue::from_static()` to avoid copying the `&str`. +- We also invoke `.set_sensitive(true)` on the `HeaderValue`, which in theory indicates to other parts of the HTTP stack that the header should be treated with "special care" to avoid leakage: + +https://github.com/hyperium/http/blob/439d1c50d71e3be3204b6c4a1bf2255ed78e1f93/src/header/value.rs#L346-L376 diff --git a/codex-rs/responses-api-proxy/npm/README.md b/codex-rs/responses-api-proxy/npm/README.md new file mode 100644 index 00000000..3458e527 --- /dev/null +++ b/codex-rs/responses-api-proxy/npm/README.md @@ -0,0 +1,13 @@ +# @openai/codex-responses-api-proxy + +

+Run `npm i -g @openai/codex-responses-api-proxy` to install `codex-responses-api-proxy`.

+ +This package distributes the prebuilt [Codex Responses API proxy binary](https://github.com/openai/codex/tree/main/codex-rs/responses-api-proxy) for macOS, Linux, and Windows. + +To see available options, run: + +``` +node ./bin/codex-responses-api-proxy.js --help +``` + +Refer to [`codex-rs/responses-api-proxy/README.md`](https://github.com/openai/codex/blob/main/codex-rs/responses-api-proxy/README.md) for detailed documentation. diff --git a/codex-rs/responses-api-proxy/npm/bin/codex-responses-api-proxy.js b/codex-rs/responses-api-proxy/npm/bin/codex-responses-api-proxy.js new file mode 100755 index 00000000..e2c3ee7d --- /dev/null +++ b/codex-rs/responses-api-proxy/npm/bin/codex-responses-api-proxy.js @@ -0,0 +1,97 @@ +#!/usr/bin/env node +// Entry point for the Codex responses API proxy binary. + +import { spawn } from "node:child_process"; +import path from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +function determineTargetTriple(platform, arch) { + switch (platform) { + case "linux": + case "android": + if (arch === "x64") { + return "x86_64-unknown-linux-musl"; + } + if (arch === "arm64") { + return "aarch64-unknown-linux-musl"; + } + break; + case "darwin": + if (arch === "x64") { + return "x86_64-apple-darwin"; + } + if (arch === "arm64") { + return "aarch64-apple-darwin"; + } + break; + case "win32": + if (arch === "x64") { + return "x86_64-pc-windows-msvc"; + } + if (arch === "arm64") { + return "aarch64-pc-windows-msvc"; + } + break; + default: + break; + } + return null; +} + +const targetTriple = determineTargetTriple(process.platform, process.arch); +if (!targetTriple) { + throw new Error( + `Unsupported platform: ${process.platform} (${process.arch})`, + ); +} + +const vendorRoot = path.join(__dirname, "..", "vendor"); +const archRoot = path.join(vendorRoot, targetTriple); +const binaryBaseName = "codex-responses-api-proxy"; +const binaryPath 
= path.join( + archRoot, + binaryBaseName, + process.platform === "win32" ? `${binaryBaseName}.exe` : binaryBaseName, +); + +const child = spawn(binaryPath, process.argv.slice(2), { + stdio: "inherit", +}); + +child.on("error", (err) => { + console.error(err); + process.exit(1); +}); + +const forwardSignal = (signal) => { + if (!child.killed) { + try { + child.kill(signal); + } catch { + /* ignore */ + } + } +}; + +["SIGINT", "SIGTERM", "SIGHUP"].forEach((sig) => { + process.on(sig, () => forwardSignal(sig)); +}); + +const childResult = await new Promise((resolve) => { + child.on("exit", (code, signal) => { + if (signal) { + resolve({ type: "signal", signal }); + } else { + resolve({ type: "code", exitCode: code ?? 1 }); + } + }); +}); + +if (childResult.type === "signal") { + process.kill(process.pid, childResult.signal); +} else { + process.exit(childResult.exitCode); +} diff --git a/codex-rs/responses-api-proxy/npm/package.json b/codex-rs/responses-api-proxy/npm/package.json new file mode 100644 index 00000000..f3956a77 --- /dev/null +++ b/codex-rs/responses-api-proxy/npm/package.json @@ -0,0 +1,21 @@ +{ + "name": "@openai/codex-responses-api-proxy", + "version": "0.0.0-dev", + "license": "Apache-2.0", + "bin": { + "codex-responses-api-proxy": "bin/codex-responses-api-proxy.js" + }, + "type": "module", + "engines": { + "node": ">=16" + }, + "files": [ + "bin", + "vendor" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/openai/codex.git", + "directory": "codex-rs/responses-api-proxy/npm" + } +} diff --git a/codex-rs/responses-api-proxy/src/read_api_key.rs b/codex-rs/responses-api-proxy/src/read_api_key.rs index 8ffad2aa..1dda92f3 100644 --- a/codex-rs/responses-api-proxy/src/read_api_key.rs +++ b/codex-rs/responses-api-proxy/src/read_api_key.rs @@ -54,9 +54,16 @@ where )); } + if let Err(err) = validate_auth_header_bytes(&buf[AUTH_HEADER_PREFIX.len()..total]) { + buf.zeroize(); + return Err(err); + } + let header_str = match 
std::str::from_utf8(&buf[..total]) { Ok(value) => value, Err(err) => { + // In theory, validate_auth_header_bytes() should have caught + // any invalid UTF-8 sequences, but just in case... buf.zeroize(); return Err(err).context("reading Authorization header from stdin as UTF-8"); } @@ -113,6 +120,21 @@ fn mlock_str(value: &str) { #[cfg(not(unix))] fn mlock_str(_value: &str) {} +/// The key should match /^[A-Za-z0-9\-_]+$/. Ensure there is no funny business +/// with NUL characters and whatnot. +fn validate_auth_header_bytes(key_bytes: &[u8]) -> Result<()> { + if key_bytes + .iter() + .all(|byte| byte.is_ascii_alphanumeric() || matches!(byte, b'-' | b'_')) + { + return Ok(()); + } + + Err(anyhow!( + "OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'" + )) +} + #[cfg(test)] mod tests { use super::*; @@ -158,7 +180,7 @@ mod tests { }) .unwrap_err(); let message = format!("{err:#}"); - assert!(message.contains("too large")); + assert!(message.contains("OPENAI_API_KEY is too large to fit in the 512-byte buffer")); } #[test] @@ -180,6 +202,23 @@ mod tests { .unwrap_err(); let message = format!("{err:#}"); - assert!(message.contains("UTF-8")); + assert!( + message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'") + ); + } + + #[test] + fn errors_on_invalid_characters() { + let err = read_auth_header_with(|buf| { + let data = b"sk-abc!23"; + buf[..data.len()].copy_from_slice(data); + Ok(data.len()) + }) + .unwrap_err(); + + let message = format!("{err:#}"); + assert!( + message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'") + ); } }