feat: introduce npm module for codex-responses-api-proxy (#4417)
This PR expands `.github/workflows/rust-release.yml` so that it also builds and publishes the `npm` module for `@openai/codex-responses-api-proxy` in addition to `@openai/codex`. The two `npm` modules are similar in that each contains a single `.js` file that is a thin launcher around the appropriate native executable. (Since we have a minimal dependency on Node.js, I also lowered the minimum version from 20 to 16 and verified that works on my machine.)

As part of this change, we tighten up some of the docs around `codex-responses-api-proxy` and ensure the details regarding protecting the `OPENAI_API_KEY` in memory match the implementation.

To test the `npm` build process, I ran:

```
./codex-cli/scripts/build_npm_package.py --package codex-responses-api-proxy --version 0.43.0-alpha.3
```

which stages the `npm` module for `@openai/codex-responses-api-proxy` in a temp directory, using the binary artifacts from https://github.com/openai/codex/releases/tag/rust-v0.43.0-alpha.3.
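The staged module ends up with roughly this layout (a sketch inferred from the staging-script and installer changes below; the set of `<target-triple>` directories matches the release targets):

```
<staging-dir>/
├── package.json
├── README.md
├── bin/
│   └── codex-responses-api-proxy.js
└── vendor/
    └── <target-triple>/
        └── codex-responses-api-proxy/
            └── codex-responses-api-proxy[.exe]
```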
`.github/workflows/rust-release.yml` (vendored; 30 changed lines):

```diff
@@ -218,17 +218,30 @@ jobs:
       # build_npm_package.py requires DotSlash when staging releases.
       - uses: facebook/install-dotslash@v2
-      - name: Stage npm package
+      - name: Stage codex CLI npm package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage"
          ./codex-cli/scripts/build_npm_package.py \
+            --package codex \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --staging-dir "${TMP_DIR}" \
            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"

+      - name: Stage responses API proxy npm package
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          set -euo pipefail
+          TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
+          ./codex-cli/scripts/build_npm_package.py \
+            --package codex-responses-api-proxy \
+            --release-version "${{ steps.release_name.outputs.name }}" \
+            --staging-dir "${TMP_DIR}" \
+            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"
+
       - name: Create GitHub Release
         uses: softprops/action-gh-release@v2
         with:
@@ -271,7 +284,7 @@ jobs:
       - name: Update npm
        run: npm install -g npm@latest

-      - name: Download npm tarball from release
+      - name: Download npm tarballs from release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
@@ -283,6 +296,10 @@ jobs:
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-npm-${version}.tgz" \
            --dir dist/npm
+          gh release download "$tag" \
+            --repo "${GITHUB_REPOSITORY}" \
+            --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
+            --dir dist/npm

          # No NODE_AUTH_TOKEN needed because we use OIDC.
       - name: Publish to npm
@@ -296,7 +313,14 @@ jobs:
            tag_args+=(--tag "${NPM_TAG}")
          fi

-          npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"
+          tarballs=(
+            "codex-npm-${VERSION}.tgz"
+            "codex-responses-api-proxy-npm-${VERSION}.tgz"
+          )
+
+          for tarball in "${tarballs[@]}"; do
+            npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
+          done

   update-branch:
     name: Update latest-alpha-cli branch
```
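To sanity-check a staged tarball locally before the workflow publishes it, installing the packed `.tgz` directly should work (hedged example; the exact filename comes from the `--pack-output` value above):

```shell
npm install -g ./codex-responses-api-proxy-npm-0.43.0-alpha.3.tgz
codex-responses-api-proxy --help
```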
`README.md`:

```diff
@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:
 | Requirement                 | Details                                                         |
 | --------------------------- | --------------------------------------------------------------- |
 | Operating systems           | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
-| Node.js                     | **22 or newer** (LTS recommended)                               |
+| Node.js                     | **16 or newer** (Node 20 LTS recommended)                       |
 | Git (optional, recommended) | 2.23+ for built-in PR helpers                                   |
 | RAM                         | 4-GB minimum (8-GB recommended)                                 |

@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
 <details>
 <summary>Does it work on Windows?</summary>

-Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
+Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.

 </details>

```
`codex-cli/bin/codex.js`:

```diff
@@ -1,6 +1,7 @@
 #!/usr/bin/env node
 // Unified entry point for the Codex CLI.

+import { spawn } from "node:child_process";
 import { existsSync } from "fs";
 import path from "path";
 import { fileURLToPath } from "url";
@@ -68,7 +69,6 @@ const binaryPath = path.join(archRoot, "codex", codexBinaryName);
 // executing. This allows us to forward those signals to the child process
 // and guarantees that when either the child terminates or the parent
 // receives a fatal signal, both processes exit in a predictable manner.
-const { spawn } = await import("child_process");

 function getUpdatedPath(newDirs) {
   const pathSep = process.platform === "win32" ? ";" : ":";
```
`codex-cli/package-lock.json` (generated; 2 changed lines):

```diff
@@ -11,7 +11,7 @@
         "codex": "bin/codex.js"
       },
       "engines": {
-        "node": ">=20"
+        "node": ">=16"
       }
     }
   }
 }
```
`codex-cli/package.json`:

```diff
@@ -7,7 +7,7 @@
   },
   "type": "module",
   "engines": {
-    "node": ">=20"
+    "node": ">=16"
   },
   "files": [
     "bin",
```
`codex-cli/scripts/build_npm_package.py`:

```diff
@@ -13,6 +13,7 @@ from pathlib import Path
 SCRIPT_DIR = Path(__file__).resolve().parent
 CODEX_CLI_ROOT = SCRIPT_DIR.parent
 REPO_ROOT = CODEX_CLI_ROOT.parent
+RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
 GITHUB_REPO = "openai/codex"

 # The docs are not clear on what the expected value/format of
@@ -23,6 +24,12 @@ WORKFLOW_NAME = ".github/workflows/rust-release.yml"

 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
+    parser.add_argument(
+        "--package",
+        choices=("codex", "codex-responses-api-proxy"),
+        default="codex",
+        help="Which npm package to stage (default: codex).",
+    )
     parser.add_argument(
         "--version",
         help="Version number to write to package.json inside the staged package.",
@@ -63,6 +70,7 @@ def parse_args() -> argparse.Namespace:
 def main() -> int:
     args = parse_args()

+    package = args.package
     version = args.version
     release_version = args.release_version
     if release_version:
@@ -76,7 +84,7 @@ def main() -> int:
     staging_dir, created_temp = prepare_staging_dir(args.staging_dir)

     try:
-        stage_sources(staging_dir, version)
+        stage_sources(staging_dir, version, package)

         workflow_url = args.workflow_url
         resolved_head_sha: str | None = None
@@ -100,16 +108,23 @@ def main() -> int:
         if not workflow_url:
             raise RuntimeError("Unable to determine workflow URL for native binaries.")

-        install_native_binaries(staging_dir, workflow_url)
+        install_native_binaries(staging_dir, workflow_url, package)

         if release_version:
             staging_dir_str = str(staging_dir)
-            print(
-                f"Staged version {version} for release in {staging_dir_str}\n\n"
-                "Verify the CLI:\n"
-                f"  node {staging_dir_str}/bin/codex.js --version\n"
-                f"  node {staging_dir_str}/bin/codex.js --help\n\n"
-            )
+            if package == "codex":
+                print(
+                    f"Staged version {version} for release in {staging_dir_str}\n\n"
+                    "Verify the CLI:\n"
+                    f"  node {staging_dir_str}/bin/codex.js --version\n"
+                    f"  node {staging_dir_str}/bin/codex.js --help\n\n"
+                )
+            else:
+                print(
+                    f"Staged version {version} for release in {staging_dir_str}\n\n"
+                    "Verify the responses API proxy:\n"
+                    f"  node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
+                )
         else:
             print(f"Staged package in {staging_dir}")

@@ -136,20 +151,34 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
     return temp_dir, True


-def stage_sources(staging_dir: Path, version: str) -> None:
+def stage_sources(staging_dir: Path, version: str, package: str) -> None:
     bin_dir = staging_dir / "bin"
     bin_dir.mkdir(parents=True, exist_ok=True)

-    shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
-    rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
-    if rg_manifest.exists():
-        shutil.copy2(rg_manifest, bin_dir / "rg")
+    if package == "codex":
+        shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
+        rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
+        if rg_manifest.exists():
+            shutil.copy2(rg_manifest, bin_dir / "rg")

-    readme_src = REPO_ROOT / "README.md"
-    if readme_src.exists():
-        shutil.copy2(readme_src, staging_dir / "README.md")
+        readme_src = REPO_ROOT / "README.md"
+        if readme_src.exists():
+            shutil.copy2(readme_src, staging_dir / "README.md")

-    with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
+        package_json_path = CODEX_CLI_ROOT / "package.json"
+    elif package == "codex-responses-api-proxy":
+        launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
+        shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")
+
+        readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
+        if readme_src.exists():
+            shutil.copy2(readme_src, staging_dir / "README.md")
+
+        package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
+    else:
+        raise RuntimeError(f"Unknown package '{package}'.")
+
+    with open(package_json_path, "r", encoding="utf-8") as fh:
         package_json = json.load(fh)
     package_json["version"] = version

@@ -158,8 +187,20 @@ def stage_sources(staging_dir: Path, version: str) -> None:
         out.write("\n")


-def install_native_binaries(staging_dir: Path, workflow_url: str) -> None:
-    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url, str(staging_dir)]
+def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
+    package_components = {
+        "codex": ["codex", "rg"],
+        "codex-responses-api-proxy": ["codex-responses-api-proxy"],
+    }
+
+    components = package_components.get(package)
+    if components is None:
+        raise RuntimeError(f"Unknown package '{package}'.")
+
+    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
+    for component in components:
+        cmd.extend(["--component", component])
+    cmd.append(str(staging_dir))
     subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
```
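With the new `--package` flag, the same script stages either module. For example, mirroring the workflow invocation above (flags as added in this diff; the version is illustrative):

```shell
./codex-cli/scripts/build_npm_package.py \
  --package codex-responses-api-proxy \
  --release-version 0.43.0-alpha.3 \
  --staging-dir /tmp/npm-stage-responses \
  --pack-output dist/npm/codex-responses-api-proxy-npm-0.43.0-alpha.3.tgz
```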
`codex-cli/scripts/install_native_deps.py`:

```diff
@@ -9,6 +9,7 @@ import subprocess
 import tarfile
 import tempfile
 import zipfile
+from dataclasses import dataclass
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from pathlib import Path
 from typing import Iterable, Sequence
@@ -20,7 +21,7 @@ CODEX_CLI_ROOT = SCRIPT_DIR.parent
 DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
 VENDOR_DIR_NAME = "vendor"
 RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
-CODEX_TARGETS = (
+BINARY_TARGETS = (
     "x86_64-unknown-linux-musl",
     "aarch64-unknown-linux-musl",
     "x86_64-apple-darwin",
@@ -29,6 +30,27 @@ CODEX_TARGETS = (
     "aarch64-pc-windows-msvc",
 )


+@dataclass(frozen=True)
+class BinaryComponent:
+    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
+    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
+    binary_basename: str  # executable name inside dest_dir (before optional .exe)
+
+
+BINARY_COMPONENTS = {
+    "codex": BinaryComponent(
+        artifact_prefix="codex",
+        dest_dir="codex",
+        binary_basename="codex",
+    ),
+    "codex-responses-api-proxy": BinaryComponent(
+        artifact_prefix="codex-responses-api-proxy",
+        dest_dir="codex-responses-api-proxy",
+        binary_basename="codex-responses-api-proxy",
+    ),
+}

 RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
     ("x86_64-unknown-linux-musl", "linux-x86_64"),
     ("aarch64-unknown-linux-musl", "linux-aarch64"),
@@ -50,6 +72,16 @@ def parse_args() -> argparse.Namespace:
             "known good run when omitted."
         ),
     )
+    parser.add_argument(
+        "--component",
+        dest="components",
+        action="append",
+        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
+        help=(
+            "Limit installation to the specified components."
+            " May be repeated. Defaults to 'codex' and 'rg'."
+        ),
+    )
     parser.add_argument(
         "root",
         nargs="?",
@@ -69,18 +101,28 @@ def main() -> int:
     vendor_dir = codex_cli_root / VENDOR_DIR_NAME
     vendor_dir.mkdir(parents=True, exist_ok=True)

+    components = args.components or ["codex", "rg"]
+
     workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
     if not workflow_url:
         workflow_url = DEFAULT_WORKFLOW_URL

     workflow_id = workflow_url.rstrip("/").split("/")[-1]
+    print(f"Downloading native artifacts from workflow {workflow_id}...")
+
     with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
         artifacts_dir = Path(artifacts_dir_str)
         _download_artifacts(workflow_id, artifacts_dir)
-        install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)
+        install_binary_components(
+            artifacts_dir,
+            vendor_dir,
+            BINARY_TARGETS,
+            [name for name in components if name in BINARY_COMPONENTS],
+        )

-        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
+        if "rg" in components:
+            print("Fetching ripgrep binaries...")
+            fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

     print(f"Installed native dependencies into {vendor_dir}")
     return 0
@@ -124,6 +166,8 @@ def fetch_rg(
     results: dict[str, Path] = {}
     max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))

+    print("Installing ripgrep binaries for targets: " + ", ".join(targets))
+
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
         future_map = {
             executor.submit(
@@ -140,6 +184,7 @@ def fetch_rg(
         for future in as_completed(future_map):
             target = future_map[future]
             results[target] = future.result()
+            print(f"  installed ripgrep for {target}")

     return [results[target] for target in targets]

@@ -158,40 +203,60 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
     subprocess.check_call(cmd)


-def install_codex_binaries(
-    artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
-) -> list[Path]:
+def install_binary_components(
+    artifacts_dir: Path,
+    vendor_dir: Path,
+    targets: Iterable[str],
+    component_names: Sequence[str],
+) -> None:
+    selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
+    if not selected_components:
+        return
+
     targets = list(targets)
     if not targets:
-        return []
+        return

-    results: dict[str, Path] = {}
-    max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
-
-    with ThreadPoolExecutor(max_workers=max_workers) as executor:
-        future_map = {
-            executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
-            for target in targets
-        }
-
-        for future in as_completed(future_map):
-            target = future_map[future]
-            results[target] = future.result()
-
-    return [results[target] for target in targets]
+    for component in selected_components:
+        print(
+            f"Installing {component.binary_basename} binaries for targets: "
+            + ", ".join(targets)
+        )
+        max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+            futures = {
+                executor.submit(
+                    _install_single_binary,
+                    artifacts_dir,
+                    vendor_dir,
+                    target,
+                    component,
+                ): target
+                for target in targets
+            }
+            for future in as_completed(futures):
+                installed_path = future.result()
+                print(f"  installed {installed_path}")


-def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
+def _install_single_binary(
+    artifacts_dir: Path,
+    vendor_dir: Path,
+    target: str,
+    component: BinaryComponent,
+) -> Path:
     artifact_subdir = artifacts_dir / target
-    archive_name = _archive_name_for_target(target)
+    archive_name = _archive_name_for_target(component.artifact_prefix, target)
     archive_path = artifact_subdir / archive_name
     if not archive_path.exists():
         raise FileNotFoundError(f"Expected artifact not found: {archive_path}")

-    dest_dir = vendor_dir / target / "codex"
+    dest_dir = vendor_dir / target / component.dest_dir
     dest_dir.mkdir(parents=True, exist_ok=True)

-    binary_name = "codex.exe" if "windows" in target else "codex"
+    binary_name = (
+        f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
+    )
     dest = dest_dir / binary_name
     dest.unlink(missing_ok=True)
     extract_archive(archive_path, "zst", None, dest)
@@ -200,10 +265,10 @@ def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target:
     return dest


-def _archive_name_for_target(target: str) -> str:
+def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
     if "windows" in target:
-        return f"codex-{target}.exe.zst"
+        return f"{artifact_prefix}-{target}.exe.zst"
-    return f"codex-{target}.zst"
+    return f"{artifact_prefix}-{target}.zst"


 def _fetch_single_rg(
```
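The `--component` flag also makes partial installs possible outside the packaging flow; for example (illustrative invocation, run from `codex-cli/` as the staging script does):

```shell
# Install only the responses API proxy binaries, skipping codex and rg.
./scripts/install_native_deps.py \
  --workflow-url https://github.com/openai/codex/actions/runs/17952349351 \
  --component codex-responses-api-proxy \
  /tmp/npm-stage
```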
`codex-rs/process-hardening/README.md` (new file; 7 lines):

```markdown
# codex-process-hardening

This crate provides `pre_main_hardening()`, which is designed to be called pre-`main()` (using `#[ctor::ctor]`) to perform various process hardening steps, such as

- disabling core dumps
- disabling ptrace attach on Linux and macOS
- removing dangerous environment variables such as `LD_PRELOAD` and `DYLD_*`
```
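To make the mechanism concrete, here is a minimal sketch of how a binary could opt in, assuming the crate exposes `pre_main_hardening()` as described; the registration site and function name `harden_process` are illustrative, not necessarily how the crate wires itself up:

```rust
use ctor::ctor;

// `ctor` registers this as a constructor, so it runs before `main()`.
#[ctor]
fn harden_process() {
    codex_process_hardening::pre_main_hardening();
}

fn main() {
    // By the time main runs, core dumps are disabled, ptrace attach is
    // blocked (Linux/macOS), and variables like LD_PRELOAD are removed.
}
```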
`codex-rs/responses-api-proxy/README.md`:

````diff
@@ -4,12 +4,12 @@ A strict HTTP proxy that only forwards `POST` requests to `/v1/responses` to the

 ## Expected Usage

-**IMPORTANT:** This is designed to be used with `CODEX_SECURE_MODE=1` so that an unprivileged user cannot inspect or tamper with this process. Though if `--http-shutdown` is specified, an unprivileged user _can_ shut down the server.
+**IMPORTANT:** `codex-responses-api-proxy` is designed to be run by a privileged user with access to `OPENAI_API_KEY` so that an unprivileged user cannot inspect or tamper with the process. Though if `--http-shutdown` is specified, an unprivileged user _can_ make a `GET` request to `/shutdown` to shut down the server, as an unprivileged user could not send `SIGTERM` to kill the process.

-A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server:
+A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server, as `codex-responses-api-proxy` reads the auth token from `stdin`:

 ```shell
-printenv OPENAI_API_KEY | CODEX_SECURE_MODE=1 codex responses-api-proxy --http-shutdown --server-info /tmp/server-info.json
+printenv OPENAI_API_KEY | codex-responses-api-proxy --http-shutdown --server-info /tmp/server-info.json
 ```

 A non-privileged user would then run Codex as follows, specifying the `model_provider` dynamically:
@@ -22,7 +22,7 @@ codex exec -c "model_providers.openai-proxy={ name = 'OpenAI Proxy', base_url =
 'Your prompt here'
 ```

-When the unprivileged user was finished, they could shut down the server using `curl` (since `kill -9` is not an option):
+When the unprivileged user was finished, they could shut down the server using `curl` (since `kill -SIGTERM` is not an option):

 ```shell
 curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown"
@@ -30,17 +30,17 @@ curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown"

 ## Behavior

-- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex responses-api-proxy`).
+- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex-responses-api-proxy`).
 - Formats the header value as `Bearer <key>` and attempts to `mlock(2)` the memory holding that header so it is not swapped to disk.
 - Listens on the provided port or an ephemeral port if `--port` is not specified.
 - Accepts exactly `POST /v1/responses` (no query string). The request body is forwarded to `https://api.openai.com/v1/responses` with `Authorization: Bearer <key>` set. All original request headers (except any incoming `Authorization`) are forwarded upstream. For other requests, it responds with `403`.
 - Optionally writes a single-line JSON file with server info, currently `{ "port": <u16> }`.
-- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., root) to start the proxy and another unprivileged user on the host to shut it down.
+- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., `root`) to start the proxy and another unprivileged user on the host to shut it down.

 ## CLI

 ```
-responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdown]
+codex-responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdown]
 ```

 - `--port <PORT>`: Port to bind on `127.0.0.1`. If omitted, an ephemeral port is chosen.
@@ -51,3 +51,19 @@ responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdown]

 - Only `POST /v1/responses` is permitted. No query strings are allowed.
 - All request headers are forwarded to the upstream call (aside from overriding `Authorization`). Response status and content-type are mirrored from upstream.
+
+## Hardening Details
+
+Care is taken to restrict access/copying to the value of `OPENAI_API_KEY` retained in memory:
+
+- We leverage [`codex_process_hardening`](https://github.com/openai/codex/blob/main/codex-rs/process-hardening/README.md) so `codex-responses-api-proxy` is run with standard process-hardening techniques.
+- At startup, we allocate a `1024`-byte buffer on the stack and write `"Bearer "` as the first `7` bytes.
+- We then read from `stdin`, copying the contents into the buffer after `"Bearer "`.
+- After verifying the key matches `/^[a-zA-Z0-9_-]+$/` (and does not exceed the buffer), we create a `String` from that buffer (so the data is now on the heap).
+- We zero out the stack-allocated buffer using https://crates.io/crates/zeroize so it is not optimized away by the compiler.
+- We invoke `.leak()` on the `String` so we can treat its contents as a `&'static str`, as it will live for the rest of the process.
+- On UNIX, we `mlock(2)` the memory backing the `&'static str`.
+- When using the `&'static str` to build an HTTP request, we use `HeaderValue::from_static()` to avoid copying the `&str`.
+- We also invoke `.set_sensitive(true)` on the `HeaderValue`, which in theory indicates to other parts of the HTTP stack that the header should be treated with "special care" to avoid leakage:
+
+  https://github.com/hyperium/http/blob/439d1c50d71e3be3204b6c4a1bf2255ed78e1f93/src/header/value.rs#L346-L376
````
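A condensed sketch of that flow, assuming the buffer size and validation rule listed above; names such as `build_auth_header` are illustrative, not the crate's actual API:

```rust
use std::io::Read;

use anyhow::{anyhow, Result};
use http::HeaderValue;
use zeroize::Zeroize;

fn build_auth_header() -> Result<HeaderValue> {
    // Stack buffer, pre-seeded with the "Bearer " prefix (7 bytes).
    let mut buf = [0u8; 1024];
    buf[..7].copy_from_slice(b"Bearer ");

    // Single read for brevity; the real implementation reads until EOF.
    let n = std::io::stdin().read(&mut buf[7..])?;
    let mut total = 7 + n;

    // Trim a trailing newline from e.g. `printenv OPENAI_API_KEY | ...`.
    while total > 7 && matches!(buf[total - 1], b'\n' | b'\r') {
        total -= 1;
    }

    // Reject anything outside /^[a-zA-Z0-9_-]+$/ before touching the heap.
    let key = &buf[7..total];
    if key.is_empty()
        || !key.iter().all(|&b| b.is_ascii_alphanumeric() || b == b'-' || b == b'_')
    {
        buf.zeroize();
        return Err(anyhow!("OPENAI_API_KEY must match /^[a-zA-Z0-9_-]+$/"));
    }

    // Move the header to the heap, then wipe the stack copy.
    let header: &'static str = String::from_utf8(buf[..total].to_vec())?.leak();
    buf.zeroize();
    // (On UNIX the real binary also mlock(2)s the memory backing `header`.)

    let mut value = HeaderValue::from_static(header);
    value.set_sensitive(true); // hint to the HTTP stack not to log or debug-print it
    Ok(value)
}
```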
`codex-rs/responses-api-proxy/npm/README.md` (new file; 13 lines):

````markdown
# @openai/codex-responses-api-proxy

<p align="center"><code>npm i -g @openai/codex-responses-api-proxy</code> to install <code>codex-responses-api-proxy</code></p>

This package distributes the prebuilt [Codex Responses API proxy binary](https://github.com/openai/codex/tree/main/codex-rs/responses-api-proxy) for macOS, Linux, and Windows.

To see available options, run:

```
node ./bin/codex-responses-api-proxy.js --help
```

Refer to [`codex-rs/responses-api-proxy/README.md`](https://github.com/openai/codex/blob/main/codex-rs/responses-api-proxy/README.md) for detailed documentation.
````
`codex-rs/responses-api-proxy/npm/bin/codex-responses-api-proxy.js` (new executable file; 97 lines):

```js
#!/usr/bin/env node
// Entry point for the Codex responses API proxy binary.

import { spawn } from "node:child_process";
import path from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

function determineTargetTriple(platform, arch) {
  switch (platform) {
    case "linux":
    case "android":
      if (arch === "x64") {
        return "x86_64-unknown-linux-musl";
      }
      if (arch === "arm64") {
        return "aarch64-unknown-linux-musl";
      }
      break;
    case "darwin":
      if (arch === "x64") {
        return "x86_64-apple-darwin";
      }
      if (arch === "arm64") {
        return "aarch64-apple-darwin";
      }
      break;
    case "win32":
      if (arch === "x64") {
        return "x86_64-pc-windows-msvc";
      }
      if (arch === "arm64") {
        return "aarch64-pc-windows-msvc";
      }
      break;
    default:
      break;
  }
  return null;
}

const targetTriple = determineTargetTriple(process.platform, process.arch);
if (!targetTriple) {
  throw new Error(
    `Unsupported platform: ${process.platform} (${process.arch})`,
  );
}

const vendorRoot = path.join(__dirname, "..", "vendor");
const archRoot = path.join(vendorRoot, targetTriple);
const binaryBaseName = "codex-responses-api-proxy";
const binaryPath = path.join(
  archRoot,
  binaryBaseName,
  process.platform === "win32" ? `${binaryBaseName}.exe` : binaryBaseName,
);

const child = spawn(binaryPath, process.argv.slice(2), {
  stdio: "inherit",
});

child.on("error", (err) => {
  console.error(err);
  process.exit(1);
});

const forwardSignal = (signal) => {
  if (!child.killed) {
    try {
      child.kill(signal);
    } catch {
      /* ignore */
    }
  }
};

["SIGINT", "SIGTERM", "SIGHUP"].forEach((sig) => {
  process.on(sig, () => forwardSignal(sig));
});

const childResult = await new Promise((resolve) => {
  child.on("exit", (code, signal) => {
    if (signal) {
      resolve({ type: "signal", signal });
    } else {
      resolve({ type: "code", exitCode: code ?? 1 });
    }
  });
});

if (childResult.type === "signal") {
  process.kill(process.pid, childResult.signal);
} else {
  process.exit(childResult.exitCode);
}
```
`codex-rs/responses-api-proxy/npm/package.json` (new file; 21 lines):

```json
{
  "name": "@openai/codex-responses-api-proxy",
  "version": "0.0.0-dev",
  "license": "Apache-2.0",
  "bin": {
    "codex-responses-api-proxy": "bin/codex-responses-api-proxy.js"
  },
  "type": "module",
  "engines": {
    "node": ">=16"
  },
  "files": [
    "bin",
    "vendor"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/openai/codex.git",
    "directory": "codex-rs/responses-api-proxy/npm"
  }
}
```
`codex-rs/responses-api-proxy` (Rust source handling the auth header read from `stdin`):

```diff
@@ -54,9 +54,16 @@ where
         ));
     }

+    if let Err(err) = validate_auth_header_bytes(&buf[AUTH_HEADER_PREFIX.len()..total]) {
+        buf.zeroize();
+        return Err(err);
+    }
+
     let header_str = match std::str::from_utf8(&buf[..total]) {
         Ok(value) => value,
         Err(err) => {
+            // In theory, validate_auth_header_bytes() should have caught
+            // any invalid UTF-8 sequences, but just in case...
             buf.zeroize();
             return Err(err).context("reading Authorization header from stdin as UTF-8");
         }
@@ -113,6 +120,21 @@ fn mlock_str(value: &str) {
 #[cfg(not(unix))]
 fn mlock_str(_value: &str) {}

+/// The key should match /^[A-Za-z0-9\-_]+$/. Ensure there is no funny business
+/// with NUL characters and whatnot.
+fn validate_auth_header_bytes(key_bytes: &[u8]) -> Result<()> {
+    if key_bytes
+        .iter()
+        .all(|byte| byte.is_ascii_alphanumeric() || matches!(byte, b'-' | b'_'))
+    {
+        return Ok(());
+    }
+
+    Err(anyhow!(
+        "OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'"
+    ))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -158,7 +180,7 @@ mod tests {
         })
         .unwrap_err();
         let message = format!("{err:#}");
-        assert!(message.contains("too large"));
+        assert!(message.contains("OPENAI_API_KEY is too large to fit in the 512-byte buffer"));
     }

     #[test]
@@ -180,6 +202,23 @@ mod tests {
         .unwrap_err();

         let message = format!("{err:#}");
-        assert!(message.contains("UTF-8"));
+        assert!(
+            message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'")
+        );
+    }
+
+    #[test]
+    fn errors_on_invalid_characters() {
+        let err = read_auth_header_with(|buf| {
+            let data = b"sk-abc!23";
+            buf[..data.len()].copy_from_slice(data);
+            Ok(data.len())
+        })
+        .unwrap_err();
+
+        let message = format!("{err:#}");
+        assert!(
+            message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'")
+        );
     }
 }
```