chore: introduce publishing logic for @openai/codex-sdk (#4543)
There was a bit of copypasta I put up with when we were publishing two
packages to npm, but now that it's three, I created some more scripts to
consolidate things.
With this change, I ran:
```shell
./scripts/stage_npm_packages.py --release-version 0.43.0-alpha.8 --package codex --package codex-responses-api-proxy --package codex-sdk
```
Indeed, when it finished, I ended up with:
```shell
$ tree dist
dist
└── npm
    ├── codex-npm-0.43.0-alpha.8.tgz
    ├── codex-responses-api-proxy-npm-0.43.0-alpha.8.tgz
    └── codex-sdk-npm-0.43.0-alpha.8.tgz
$ tar tzvf dist/npm/codex-sdk-npm-0.43.0-alpha.8.tgz
-rwxr-xr-x 0 0 0 25476720 Oct 26 1985 package/vendor/aarch64-apple-darwin/codex/codex
-rwxr-xr-x 0 0 0 29871400 Oct 26 1985 package/vendor/aarch64-unknown-linux-musl/codex/codex
-rwxr-xr-x 0 0 0 28368096 Oct 26 1985 package/vendor/x86_64-apple-darwin/codex/codex
-rwxr-xr-x 0 0 0 36029472 Oct 26 1985 package/vendor/x86_64-unknown-linux-musl/codex/codex
-rw-r--r-- 0 0 0 10926 Oct 26 1985 package/LICENSE
-rw-r--r-- 0 0 0 30187520 Oct 26 1985 package/vendor/aarch64-pc-windows-msvc/codex/codex.exe
-rw-r--r-- 0 0 0 35277824 Oct 26 1985 package/vendor/x86_64-pc-windows-msvc/codex/codex.exe
-rw-r--r-- 0 0 0 4842 Oct 26 1985 package/dist/index.js
-rw-r--r-- 0 0 0 1347 Oct 26 1985 package/package.json
-rw-r--r-- 0 0 0 9867 Oct 26 1985 package/dist/index.js.map
-rw-r--r-- 0 0 0 12 Oct 26 1985 package/README.md
-rw-r--r-- 0 0 0 4287 Oct 26 1985 package/dist/index.d.ts
```
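For a quick local check of an SDK tarball, something along these lines should work; it mirrors the verification hint the staging script prints, and the temp directory is just an illustrative choice:

```shell
# Illustrative smoke test of the staged SDK tarball; paths are placeholders.
mkdir -p /tmp/codex-sdk-smoke
tar xzf dist/npm/codex-sdk-npm-0.43.0-alpha.8.tgz -C /tmp/codex-sdk-smoke
cd /tmp/codex-sdk-smoke/package
node -e "import('./dist/index.js').then(() => console.log('ok'))"
```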
`.github/workflows/ci.yml` (10 lines changed):

```diff
@@ -27,7 +27,7 @@ jobs:
       - name: Install dependencies
         run: pnpm install --frozen-lockfile

-      # build_npm_package.py requires DotSlash when staging releases.
+      # stage_npm_packages.py requires DotSlash when staging releases.
       - uses: facebook/install-dotslash@v2

       - name: Stage npm package
@@ -37,10 +37,12 @@ jobs:
         run: |
           set -euo pipefail
           CODEX_VERSION=0.40.0
-          PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
-          python3 ./codex-cli/scripts/build_npm_package.py \
+          OUTPUT_DIR="${RUNNER_TEMP}"
+          python3 ./scripts/stage_npm_packages.py \
             --release-version "$CODEX_VERSION" \
-            --pack-output "$PACK_OUTPUT"
+            --package codex \
+            --output-dir "$OUTPUT_DIR"
+          PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"
           echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"

       - name: Upload staged npm package artifact
```
`.github/workflows/rust-release.yml` (41 lines changed):

```diff
@@ -216,31 +216,27 @@ jobs:
             echo "npm_tag=" >> "$GITHUB_OUTPUT"
           fi

-      # build_npm_package.py requires DotSlash when staging releases.
-      - uses: facebook/install-dotslash@v2
-      - name: Stage codex CLI npm package
-        env:
-          GH_TOKEN: ${{ github.token }}
-        run: |
-          set -euo pipefail
-          TMP_DIR="${RUNNER_TEMP}/npm-stage"
-          ./codex-cli/scripts/build_npm_package.py \
-            --package codex \
-            --release-version "${{ steps.release_name.outputs.name }}" \
-            --staging-dir "${TMP_DIR}" \
-            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
+      - name: Setup Node.js for npm packaging
+        uses: actions/setup-node@v5
+        with:
+          node-version: 22

-      - name: Stage responses API proxy npm package
+      - name: Setup pnpm
+        uses: pnpm/action-setup@v3
+        with:
+          version: 9
+
+      # stage_npm_packages.py requires DotSlash when staging releases.
+      - uses: facebook/install-dotslash@v2
+      - name: Stage npm packages
         env:
           GH_TOKEN: ${{ github.token }}
         run: |
-          set -euo pipefail
-          TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
-          ./codex-cli/scripts/build_npm_package.py \
-            --package codex-responses-api-proxy \
+          ./scripts/stage_npm_packages.py \
             --release-version "${{ steps.release_name.outputs.name }}" \
-            --staging-dir "${TMP_DIR}" \
-            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"
+            --package codex \
+            --package codex-responses-api-proxy \
+            --package codex-sdk

       - name: Create GitHub Release
         uses: softprops/action-gh-release@v2
@@ -300,6 +296,10 @@ jobs:
             --repo "${GITHUB_REPOSITORY}" \
             --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
             --dir dist/npm
+          gh release download "$tag" \
+            --repo "${GITHUB_REPOSITORY}" \
+            --pattern "codex-sdk-npm-${version}.tgz" \
+            --dir dist/npm

       # No NODE_AUTH_TOKEN needed because we use OIDC.
       - name: Publish to npm
@@ -316,6 +316,7 @@
           tarballs=(
             "codex-npm-${VERSION}.tgz"
             "codex-responses-api-proxy-npm-${VERSION}.tgz"
+            "codex-sdk-npm-${VERSION}.tgz"
           )

           for tarball in "${tarballs[@]}"; do
```
The npm releases README:

````diff
@@ -1,11 +1,19 @@
 # npm releases

-Run the following:
-To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
+Use the staging helper in the repo root to generate npm tarballs for a release. For
+example, to stage the CLI, responses proxy, and SDK packages for version `0.6.0`:

 ```bash
-./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
+./scripts/stage_npm_packages.py \
+  --release-version 0.6.0 \
+  --package codex \
+  --package codex-responses-api-proxy \
+  --package codex-sdk
 ```

-Note this will create `./codex-cli/vendor/` as a side-effect.
+This downloads the native artifacts once, hydrates `vendor/` for each package, and writes
+tarballs to `dist/npm/`.
+
+If you need to invoke `build_npm_package.py` directly, run
+`codex-cli/scripts/install_native_deps.py` first and pass `--vendor-src` pointing to the
+directory that contains the populated `vendor/` tree.
````
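A sketch of that lower-level flow, with a placeholder workflow-run URL and a throwaway vendor root (the flags shown are the ones the two scripts accept in this change):

```shell
# Sketch only: hydrate a vendor/ tree once, then point build_npm_package.py at it.
VENDOR_ROOT="$(mktemp -d)"
./codex-cli/scripts/install_native_deps.py \
  --workflow-url "https://github.com/openai/codex/actions/runs/<run-id>" \
  --component codex \
  "$VENDOR_ROOT"
./codex-cli/scripts/build_npm_package.py \
  --package codex-sdk \
  --release-version 0.6.0 \
  --vendor-src "$VENDOR_ROOT/vendor" \
  --pack-output dist/npm/codex-sdk-npm-0.6.0.tgz
```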
`codex-cli/scripts/build_npm_package.py`:

```diff
@@ -3,7 +3,6 @@

 import argparse
 import json
-import re
 import shutil
 import subprocess
 import sys
@@ -14,19 +13,25 @@ SCRIPT_DIR = Path(__file__).resolve().parent
 CODEX_CLI_ROOT = SCRIPT_DIR.parent
 REPO_ROOT = CODEX_CLI_ROOT.parent
 RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
-GITHUB_REPO = "openai/codex"
+CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript"

-# The docs are not clear on what the expected value/format of
-# workflow/workflowName is:
-# https://cli.github.com/manual/gh_run_list
-WORKFLOW_NAME = ".github/workflows/rust-release.yml"
+PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
+    "codex": ["codex", "rg"],
+    "codex-responses-api-proxy": ["codex-responses-api-proxy"],
+    "codex-sdk": ["codex"],
+}
+COMPONENT_DEST_DIR: dict[str, str] = {
+    "codex": "codex",
+    "codex-responses-api-proxy": "codex-responses-api-proxy",
+    "rg": "path",
+}


 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
     parser.add_argument(
         "--package",
-        choices=("codex", "codex-responses-api-proxy"),
+        choices=("codex", "codex-responses-api-proxy", "codex-sdk"),
         default="codex",
         help="Which npm package to stage (default: codex).",
     )
@@ -37,14 +42,9 @@ def parse_args() -> argparse.Namespace:
     parser.add_argument(
         "--release-version",
         help=(
-            "Version to stage for npm release. When provided, the script also resolves the "
-            "matching rust-release workflow unless --workflow-url is supplied."
+            "Version to stage for npm release."
         ),
     )
-    parser.add_argument(
-        "--workflow-url",
-        help="Optional GitHub Actions workflow run URL used to download native binaries.",
-    )
     parser.add_argument(
         "--staging-dir",
         type=Path,
@@ -64,6 +64,11 @@ def parse_args() -> argparse.Namespace:
         type=Path,
         help="Path where the generated npm tarball should be written.",
     )
+    parser.add_argument(
+        "--vendor-src",
+        type=Path,
+        help="Directory containing pre-installed native binaries to bundle (vendor root).",
+    )
     return parser.parse_args()


@@ -86,29 +91,19 @@ def main() -> int:
     try:
         stage_sources(staging_dir, version, package)

-        workflow_url = args.workflow_url
-        resolved_head_sha: str | None = None
-        if not workflow_url:
-            if release_version:
-                workflow = resolve_release_workflow(version)
-                workflow_url = workflow["url"]
-                resolved_head_sha = workflow.get("headSha")
-            else:
-                workflow_url = resolve_latest_alpha_workflow_url()
-        elif release_version:
-            try:
-                workflow = resolve_release_workflow(version)
-                resolved_head_sha = workflow.get("headSha")
-            except Exception:
-                resolved_head_sha = None
+        vendor_src = args.vendor_src.resolve() if args.vendor_src else None
+        native_components = PACKAGE_NATIVE_COMPONENTS.get(package, [])

-        if release_version and resolved_head_sha:
-            print(f"should `git checkout {resolved_head_sha}`")
+        if native_components:
+            if vendor_src is None:
+                components_str = ", ".join(native_components)
+                raise RuntimeError(
+                    "Native components "
+                    f"({components_str}) required for package '{package}'. Provide --vendor-src "
+                    "pointing to a directory containing pre-installed binaries."
+                )

-        if not workflow_url:
-            raise RuntimeError("Unable to determine workflow URL for native binaries.")
-
-        install_native_binaries(staging_dir, workflow_url, package)
+            copy_native_binaries(vendor_src, staging_dir, native_components)

         if release_version:
             staging_dir_str = str(staging_dir)
@@ -119,12 +114,20 @@ def main() -> int:
                     f" node {staging_dir_str}/bin/codex.js --version\n"
                     f" node {staging_dir_str}/bin/codex.js --help\n\n"
                 )
-            else:
+            elif package == "codex-responses-api-proxy":
                 print(
                     f"Staged version {version} for release in {staging_dir_str}\n\n"
                     "Verify the responses API proxy:\n"
                     f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
                 )
+            else:
+                print(
+                    f"Staged version {version} for release in {staging_dir_str}\n\n"
+                    "Verify the SDK contents:\n"
+                    f" ls {staging_dir_str}/dist\n"
+                    f" ls {staging_dir_str}/vendor\n"
+                    " node -e \"import('./dist/index.js').then(() => console.log('ok'))\"\n\n"
+                )
         else:
             print(f"Staged package in {staging_dir}")

@@ -152,10 +155,9 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:


 def stage_sources(staging_dir: Path, version: str, package: str) -> None:
-    bin_dir = staging_dir / "bin"
-    bin_dir.mkdir(parents=True, exist_ok=True)
-
     if package == "codex":
+        bin_dir = staging_dir / "bin"
+        bin_dir.mkdir(parents=True, exist_ok=True)
         shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
         rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
         if rg_manifest.exists():
@@ -167,6 +169,8 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:

         package_json_path = CODEX_CLI_ROOT / "package.json"
     elif package == "codex-responses-api-proxy":
+        bin_dir = staging_dir / "bin"
+        bin_dir.mkdir(parents=True, exist_ok=True)
         launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
         shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")

@@ -175,6 +179,9 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         shutil.copy2(readme_src, staging_dir / "README.md")

         package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
+    elif package == "codex-sdk":
+        package_json_path = CODEX_SDK_ROOT / "package.json"
+        stage_codex_sdk_sources(staging_dir)
     else:
         raise RuntimeError(f"Unknown package '{package}'.")

@@ -182,91 +189,85 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         package_json = json.load(fh)
     package_json["version"] = version

+    if package == "codex-sdk":
+        scripts = package_json.get("scripts")
+        if isinstance(scripts, dict):
+            scripts.pop("prepare", None)
+
+        files = package_json.get("files")
+        if isinstance(files, list):
+            if "vendor" not in files:
+                files.append("vendor")
+        else:
+            package_json["files"] = ["dist", "vendor"]
+
     with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
         json.dump(package_json, out, indent=2)
         out.write("\n")


-def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
-    package_components = {
-        "codex": ["codex", "rg"],
-        "codex-responses-api-proxy": ["codex-responses-api-proxy"],
-    }
-
-    components = package_components.get(package)
-    if components is None:
-        raise RuntimeError(f"Unknown package '{package}'.")
-
-    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
-    for component in components:
-        cmd.extend(["--component", component])
-    cmd.append(str(staging_dir))
-    subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
+def run_command(cmd: list[str], cwd: Path | None = None) -> None:
+    print("+", " ".join(cmd))
+    subprocess.run(cmd, cwd=cwd, check=True)


-def resolve_latest_alpha_workflow_url() -> str:
-    version = determine_latest_alpha_version()
-    workflow = resolve_release_workflow(version)
-    return workflow["url"]
+def stage_codex_sdk_sources(staging_dir: Path) -> None:
+    package_root = CODEX_SDK_ROOT
+    run_command(["pnpm", "install", "--frozen-lockfile"], cwd=package_root)
+    run_command(["pnpm", "run", "build"], cwd=package_root)
+
+    dist_src = package_root / "dist"
+    if not dist_src.exists():
+        raise RuntimeError("codex-sdk build did not produce a dist directory.")
+
+    shutil.copytree(dist_src, staging_dir / "dist")
+
+    readme_src = package_root / "README.md"
+    if readme_src.exists():
+        shutil.copy2(readme_src, staging_dir / "README.md")
+
+    license_src = REPO_ROOT / "LICENSE"
+    if license_src.exists():
+        shutil.copy2(license_src, staging_dir / "LICENSE")


-def determine_latest_alpha_version() -> str:
-    releases = list_releases()
-    best_key: tuple[int, int, int, int] | None = None
-    best_version: str | None = None
-    pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
-    for release in releases:
-        tag = release.get("tag_name", "")
-        match = pattern.match(tag)
-        if not match:
-            continue
-        key = tuple(int(match.group(i)) for i in range(1, 5))
-        if best_key is None or key > best_key:
-            best_key = key
-            best_version = (
-                f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
-            )
-
-    if best_version is None:
-        raise RuntimeError("No alpha releases found when resolving workflow URL.")
-    return best_version
+def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[str]) -> None:
+    vendor_src = vendor_src.resolve()
+    if not vendor_src.exists():
+        raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
+
+    components_set = {component for component in components if component in COMPONENT_DEST_DIR}
+    if not components_set:
+        return
+
+    vendor_dest = staging_dir / "vendor"
+    if vendor_dest.exists():
+        shutil.rmtree(vendor_dest)
+    vendor_dest.mkdir(parents=True, exist_ok=True)
+
+    for target_dir in vendor_src.iterdir():
+        if not target_dir.is_dir():
+            continue
+
+        dest_target_dir = vendor_dest / target_dir.name
+        dest_target_dir.mkdir(parents=True, exist_ok=True)
+
+        for component in components_set:
+            dest_dir_name = COMPONENT_DEST_DIR.get(component)
+            if dest_dir_name is None:
+                continue
+
+            src_component_dir = target_dir / dest_dir_name
+            if not src_component_dir.exists():
+                raise RuntimeError(
+                    f"Missing native component '{component}' in vendor source: {src_component_dir}"
+                )
+
+            dest_component_dir = dest_target_dir / dest_dir_name
+            if dest_component_dir.exists():
+                shutil.rmtree(dest_component_dir)
+            shutil.copytree(src_component_dir, dest_component_dir)


-def list_releases() -> list[dict]:
-    stdout = subprocess.check_output(
-        ["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
-        text=True,
-    )
-    try:
-        releases = json.loads(stdout or "[]")
-    except json.JSONDecodeError as exc:
-        raise RuntimeError("Unable to parse releases JSON.") from exc
-    if not isinstance(releases, list):
-        raise RuntimeError("Unexpected response when listing releases.")
-    return releases
-
-
-def resolve_release_workflow(version: str) -> dict:
-    stdout = subprocess.check_output(
-        [
-            "gh",
-            "run",
-            "list",
-            "--branch",
-            f"rust-v{version}",
-            "--json",
-            "workflowName,url,headSha",
-            "--workflow",
-            WORKFLOW_NAME,
-            "--jq",
-            "first(.[])",
-        ],
-        text=True,
-    )
-    workflow = json.loads(stdout or "[]")
-    if not workflow:
-        raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
-    return workflow


 def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
```
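For orientation, the `--vendor-src` tree that `copy_native_binaries` consumes is the one `install_native_deps.py` lays down, and it is what ends up inside the published tarballs (compare the `tar tzvf` listing above). Roughly:

```shell
# Rough shape of a --vendor-src tree: one directory per target triple.
# When the `rg` component is requested it lands under <triple>/path/ per COMPONENT_DEST_DIR.
vendor
├── aarch64-apple-darwin
│   └── codex
│       └── codex
├── x86_64-unknown-linux-musl
│   └── codex
│       └── codex
└── ...
```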
`scripts/stage_npm_packages.py` (new executable file, 187 lines):

```python
#!/usr/bin/env python3
"""Stage one or more Codex npm packages for release."""

from __future__ import annotations

import argparse
import importlib.util
import json
import os
import shutil
import subprocess
import tempfile
from pathlib import Path


REPO_ROOT = Path(__file__).resolve().parent.parent
BUILD_SCRIPT = REPO_ROOT / "codex-cli" / "scripts" / "build_npm_package.py"
INSTALL_NATIVE_DEPS = REPO_ROOT / "codex-cli" / "scripts" / "install_native_deps.py"
WORKFLOW_NAME = ".github/workflows/rust-release.yml"
GITHUB_REPO = "openai/codex"

_SPEC = importlib.util.spec_from_file_location("codex_build_npm_package", BUILD_SCRIPT)
if _SPEC is None or _SPEC.loader is None:
    raise RuntimeError(f"Unable to load module from {BUILD_SCRIPT}")
_BUILD_MODULE = importlib.util.module_from_spec(_SPEC)
_SPEC.loader.exec_module(_BUILD_MODULE)
PACKAGE_NATIVE_COMPONENTS = getattr(_BUILD_MODULE, "PACKAGE_NATIVE_COMPONENTS", {})


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release-version",
        required=True,
        help="Version to stage (e.g. 0.1.0 or 0.1.0-alpha.1).",
    )
    parser.add_argument(
        "--package",
        dest="packages",
        action="append",
        required=True,
        help="Package name to stage. May be provided multiple times.",
    )
    parser.add_argument(
        "--workflow-url",
        help="Optional workflow URL to reuse for native artifacts.",
    )
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=None,
        help="Directory where npm tarballs should be written (default: dist/npm).",
    )
    parser.add_argument(
        "--keep-staging-dirs",
        action="store_true",
        help="Retain temporary staging directories instead of deleting them.",
    )
    return parser.parse_args()


def collect_native_components(packages: list[str]) -> set[str]:
    components: set[str] = set()
    for package in packages:
        components.update(PACKAGE_NATIVE_COMPONENTS.get(package, []))
    return components


def resolve_release_workflow(version: str) -> dict:
    stdout = subprocess.check_output(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--workflow",
            WORKFLOW_NAME,
            "--jq",
            "first(.[])",
        ],
        cwd=REPO_ROOT,
        text=True,
    )
    workflow = json.loads(stdout or "null")
    if not workflow:
        raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
    return workflow


def resolve_workflow_url(version: str, override: str | None) -> tuple[str, str | None]:
    if override:
        return override, None

    workflow = resolve_release_workflow(version)
    return workflow["url"], workflow.get("headSha")


def install_native_components(
    workflow_url: str,
    components: set[str],
    vendor_root: Path,
) -> None:
    if not components:
        return

    cmd = [str(INSTALL_NATIVE_DEPS), "--workflow-url", workflow_url]
    for component in sorted(components):
        cmd.extend(["--component", component])
    cmd.append(str(vendor_root))
    run_command(cmd)


def run_command(cmd: list[str]) -> None:
    print("+", " ".join(cmd))
    subprocess.run(cmd, cwd=REPO_ROOT, check=True)


def main() -> int:
    args = parse_args()

    output_dir = args.output_dir or (REPO_ROOT / "dist" / "npm")
    output_dir.mkdir(parents=True, exist_ok=True)

    runner_temp = Path(os.environ.get("RUNNER_TEMP", tempfile.gettempdir()))

    packages = list(args.packages)
    native_components = collect_native_components(packages)

    vendor_temp_root: Path | None = None
    vendor_src: Path | None = None
    resolved_head_sha: str | None = None

    final_messages = []

    try:
        if native_components:
            workflow_url, resolved_head_sha = resolve_workflow_url(
                args.release_version, args.workflow_url
            )
            vendor_temp_root = Path(tempfile.mkdtemp(prefix="npm-native-", dir=runner_temp))
            install_native_components(workflow_url, native_components, vendor_temp_root)
            vendor_src = vendor_temp_root / "vendor"

        if resolved_head_sha:
            print(f"should `git checkout {resolved_head_sha}`")

        for package in packages:
            staging_dir = Path(tempfile.mkdtemp(prefix=f"npm-stage-{package}-", dir=runner_temp))
            pack_output = output_dir / f"{package}-npm-{args.release_version}.tgz"

            cmd = [
                str(BUILD_SCRIPT),
                "--package",
                package,
                "--release-version",
                args.release_version,
                "--staging-dir",
                str(staging_dir),
                "--pack-output",
                str(pack_output),
            ]

            if vendor_src is not None:
                cmd.extend(["--vendor-src", str(vendor_src)])

            try:
                run_command(cmd)
            finally:
                if not args.keep_staging_dirs:
                    shutil.rmtree(staging_dir, ignore_errors=True)

            final_messages.append(f"Staged {package} at {pack_output}")
    finally:
        if vendor_temp_root is not None and not args.keep_staging_dirs:
            shutil.rmtree(vendor_temp_root, ignore_errors=True)

    for msg in final_messages:
        print(msg)

    return 0


if __name__ == "__main__":
    raise SystemExit(main())
```
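Since `--workflow-url`, `--output-dir`, and `--keep-staging-dirs` are plumbed through above, re-staging against an already-resolved workflow run looks roughly like this (the run URL is a placeholder):

```shell
# Placeholder run URL; skips the `gh run list` lookup and reuses that run's native artifacts.
./scripts/stage_npm_packages.py \
  --release-version 0.43.0-alpha.8 \
  --package codex-sdk \
  --workflow-url "https://github.com/openai/codex/actions/runs/<run-id>" \
  --output-dir dist/npm
```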
`sdk/typescript/README.md` (new file):

```diff
@@ -0,0 +1 @@
+# Codex SDK
```
`sdk/typescript/package.json`:

```diff
@@ -2,6 +2,11 @@
   "name": "@openai/codex-sdk",
   "version": "0.0.0-dev",
   "description": "TypeScript SDK for Codex APIs.",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/openai/codex.git",
+    "directory": "sdk/typescript"
+  },
   "keywords": [
     "openai",
     "codex",
```