Release builds are taking a while, and part of the reason is that we are building binaries we are not really using. Adding Windows binaries to releases (https://github.com/openai/codex/pull/2035) slows things down further, so we need to claw some of that time back.

- `codex-exec` is essentially a standalone `codex exec` that we offered because it is a bit smaller: it does not include the bits that power the TUI. We were using it in our experimental GitHub Action, so this PR updates the Action to use `codex exec` instead (see the sketch and the updated file below).
- `codex-linux-sandbox` was a helper binary for the TypeScript version of the CLI, but I am about to axe that, so we don't need it either.

If we decide to bring `codex-exec` back at some point, we should use a separate instance so we can build it in parallel with `codex`. (If we had beefier build machines this wouldn't be so bad, but that's not the case with the default runners from GitHub.)
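The substance of the Action-side change is just which command gets spawned. A minimal before/after sketch (the old standalone binary path and its flags are assumptions based on the description above; the new invocation matches the updated file below):

```typescript
// Sketch only: the "before" comment is an assumption about how the old
// standalone binary was invoked; the "after" matches the updated Action code.
function buildCodexArgs(prompt: string, lastMessageOutput: string): string[] {
  // Before (assumed): ["/usr/local/bin/codex-exec", "--output-last-message", lastMessageOutput, prompt]
  // After: the unified CLI with its `exec` subcommand.
  return ["/usr/local/bin/codex", "exec", "--output-last-message", lastMessageOutput, prompt];
}
```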
The Action's `runCodex` helper after the change (TypeScript, 59 lines, 1.7 KiB):
```typescript
import { fail } from "./fail";
import { EnvContext } from "./env-context";
import { tmpdir } from "os";
import { join } from "node:path";
import { readFile, mkdtemp } from "fs/promises";
import { resolveWorkspacePath } from "./github-workspace";

/**
 * Runs the Codex CLI with the provided prompt and returns the output written
 * to the "last message" file.
 */
export async function runCodex(
  prompt: string,
  ctx: EnvContext,
): Promise<string> {
  const OPENAI_API_KEY = ctx.get("OPENAI_API_KEY");

  const tempDirPath = await mkdtemp(join(tmpdir(), "codex-"));
  const lastMessageOutput = join(tempDirPath, "codex-prompt.md");

  // Use the unified CLI and its `exec` subcommand instead of the old
  // standalone `codex-exec` binary.
  const args = ["/usr/local/bin/codex", "exec"];

  const inputCodexArgs = ctx.tryGet("INPUT_CODEX_ARGS")?.trim();
  if (inputCodexArgs) {
    args.push(...inputCodexArgs.split(/\s+/));
  }

  args.push("--output-last-message", lastMessageOutput, prompt);

  const env: Record<string, string> = { ...process.env, OPENAI_API_KEY };
  const INPUT_CODEX_HOME = ctx.tryGet("INPUT_CODEX_HOME");
  if (INPUT_CODEX_HOME) {
    env.CODEX_HOME = resolveWorkspacePath(INPUT_CODEX_HOME, ctx);
  }

  console.log(`Running Codex: ${JSON.stringify(args)}`);
  const result = Bun.spawnSync(args, {
    stdout: "inherit",
    stderr: "inherit",
    env,
  });

  if (!result.success) {
    fail(`Codex failed: see above for details.`);
  }

  // Read the output generated by Codex.
  let lastMessage: string;
  try {
    lastMessage = await readFile(lastMessageOutput, "utf8");
  } catch (err) {
    fail(`Failed to read Codex output at '${lastMessageOutput}': ${err}`);
  }

  return lastMessage;
}
```
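For reference, a hypothetical call site for `runCodex` from the Action's entry point might look like the following. The `EnvContext` construction and the module path are assumptions for illustration; the real wiring in the Action may differ:

```typescript
import { EnvContext } from "./env-context";
import { runCodex } from "./run-codex"; // hypothetical module path

async function main(): Promise<void> {
  // Assumed constructor: the real EnvContext may be built differently
  // (e.g. from process.env plus Action inputs).
  const ctx = new EnvContext();
  const lastMessage = await runCodex("Review the changes in this PR.", ctx);
  console.log(lastMessage);
}

await main();
```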