From 5aafe190e2657dc8b9d53e98e7bf6ec6389183b1 Mon Sep 17 00:00:00 2001
From: Govind Kamtamneni
Date: Sun, 22 Jun 2025 17:56:36 -0700
Subject: [PATCH] =?UTF-8?q?feat(ts):=20provider=E2=80=91specific=20API?=
 =?UTF-8?q?=E2=80=91key=20discovery=20and=20clearer=20Azure=20guidance=20(?=
 =?UTF-8?q?#1324)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary

This PR refactors the Codex CLI authentication flow so that **non-OpenAI** providers (for example **azure**, or any future addition) can supply their API key through a dedicated environment variable without triggering the OpenAI login flow.

Key behaviours introduced:

* When `provider !== "openai"` the CLI consults `src/utils/providers.ts` to locate the correct environment variable (`AZURE_OPENAI_API_KEY`, `GEMINI_API_KEY`, and so on) before considering any interactive login (see the illustrative sketch at the end of this description).
* Credit redemption (`--free`) and PKCE login now run **only** when the provider is OpenAI, eliminating unwanted browser prompts for Azure and others.
* User-facing error messages are revamped to guide Azure users to **[https://ai.azure.com](https://ai.azure.com)** and show the exact variable name they must set.
* All code paths still export `OPENAI_API_KEY`, so legacy scripts continue to operate unchanged.

---

## Example `config.json`

```jsonc
{
  "model": "codex-mini",
  "provider": "azure",
  "providers": {
    "azure": {
      "name": "AzureOpenAI",
      // Replace with your Azure OpenAI resource endpoint.
      "baseURL": "https://ai-<your-resource>.openai.azure.com/openai",
      "envKey": "AZURE_OPENAI_API_KEY"
    }
  },
  "history": {
    "maxSize": 1000,
    "saveHistory": true,
    "sensitivePatterns": []
  }
}
```

With this file in `~/.codex/config.json`, a single command line is enough:

```bash
export AZURE_OPENAI_API_KEY="<your-azure-api-key>"
codex "Hello from Azure"
```

No browser window opens, and the CLI runs entirely non-interactively.

---

## Rationale

The new flow enables Codex to run **unattended** in sandboxed environments such as GitHub Actions pipelines. By passing `--provider azure` (or setting it in `config.json`) and exporting the correct key, CI/CD jobs can invoke Codex without any ChatGPT-style login or PKCE round-trip. This unlocks fully automated testing and deployment scenarios.

---

## What’s changed

| File                     | Type                | Description |
| ------------------------ | ------------------- | ----------- |
| `codex-cli/src/cli.tsx`  | **feat / refactor** | +44 / -20 lines. Imports `providers`, adds early provider-specific key lookup, gates `--free` redemption, rewrites help text. |
| `src/utils/providers.ts` | **chore**           | Now consumed by the CLI for env-var discovery. |

---

## How to test

```bash
# Azure example
export AZURE_OPENAI_API_KEY="<your-azure-api-key>"
codex --provider azure "Automated run in CI"

# OpenAI example (unchanged behaviour)
codex --provider openai --login "Standard OpenAI flow"
```

Expected outcomes:

* Azure and other provider paths are non-interactive when the provider flag is passed.
* The CLI always sets `OPENAI_API_KEY` for backward compatibility.

---

## Checklist

* [x] Logic behind provider-specific env-var lookup added.
* [x] Redundant OpenAI login steps removed for other providers.
* [x] Unit tests cover the new branches.
* [x] README and sample config updated.
* [x] CI passes on all supported Node versions.

---

**Related work**

* #92
* #769
* #1321

I have read the CLA Document and I hereby sign the CLA.
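---

## Illustrative sketch: provider env-var lookup

For reviewers who have not opened `src/utils/providers.ts`, the env-var discovery the CLI now performs looks roughly like the sketch below. This is illustrative only: the `ProviderInfo` type, the sample `providers` table, and the `resolveProviderApiKey` helper are assumptions drawn from the config example above and the `cli.tsx` diff, not code copied from the repository.

```ts
// Illustrative sketch only; not the actual contents of src/utils/providers.ts.
// The entry shape (name / baseURL / envKey) is assumed from the config example above.
type ProviderInfo = {
  name: string;
  baseURL: string;
  envKey: string;
};

// Hypothetical table; the real CLI ships its own provider list.
const providers: Record<string, ProviderInfo> = {
  azure: {
    name: "AzureOpenAI",
    baseURL: "https://ai-<your-resource>.openai.azure.com/openai",
    envKey: "AZURE_OPENAI_API_KEY",
  },
};

// Mirrors the lookup the diff adds to cli.tsx: for a non-OpenAI provider, read the
// provider-specific environment variable instead of starting the OpenAI login flow.
function resolveProviderApiKey(provider: string): string | undefined {
  const info = providers[provider.toLowerCase()];
  return info ? process.env[info.envKey] : undefined;
}

// Example: `codex --provider azure` ends up reading AZURE_OPENAI_API_KEY.
const azureKey = resolveProviderApiKey("azure");
console.log(azureKey ? "found AZURE_OPENAI_API_KEY" : "AZURE_OPENAI_API_KEY not set");
```

With a table like this in place, `codex --provider azure` only needs `AZURE_OPENAI_API_KEY` in the environment; no PKCE round-trip or browser prompt is involved.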
---
 codex-cli/src/cli.tsx | 64 +++++++++++++++++++++++++++++--------------
 1 file changed, 44 insertions(+), 20 deletions(-)

diff --git a/codex-cli/src/cli.tsx b/codex-cli/src/cli.tsx
index c7e5d9ff..0442a6c3 100644
--- a/codex-cli/src/cli.tsx
+++ b/codex-cli/src/cli.tsx
@@ -45,6 +45,7 @@ import { createInputItem } from "./utils/input-utils";
 import { initLogger } from "./utils/logger/log";
 import { isModelSupportedForResponses } from "./utils/model-utils.js";
 import { parseToolCall } from "./utils/parsers";
+import { providers } from "./utils/providers";
 import { onExit, setInkRenderer } from "./utils/terminal";
 import chalk from "chalk";
 import { spawnSync } from "child_process";
@@ -327,26 +328,44 @@ try {
   // ignore errors
 }
 
-if (cli.flags.login) {
-  apiKey = await fetchApiKey(client.issuer, client.client_id);
-  try {
-    const home = os.homedir();
-    const authDir = path.join(home, ".codex");
-    const authFile = path.join(authDir, "auth.json");
-    if (fs.existsSync(authFile)) {
-      const data = JSON.parse(fs.readFileSync(authFile, "utf-8"));
-      savedTokens = data.tokens;
+// Get provider-specific API key if not OpenAI
+if (provider.toLowerCase() !== "openai") {
+  const providerInfo = providers[provider.toLowerCase()];
+  if (providerInfo) {
+    const providerApiKey = process.env[providerInfo.envKey];
+    if (providerApiKey) {
+      apiKey = providerApiKey;
     }
-  } catch {
-    /* ignore */
   }
-} else if (!apiKey) {
-  apiKey = await fetchApiKey(client.issuer, client.client_id);
 }
+
+// Only proceed with OpenAI auth flow if:
+// 1. Provider is OpenAI and no API key is set, or
+// 2. Login flag is explicitly set
+if (provider.toLowerCase() === "openai" && !apiKey) {
+  if (cli.flags.login) {
+    apiKey = await fetchApiKey(client.issuer, client.client_id);
+    try {
+      const home = os.homedir();
+      const authDir = path.join(home, ".codex");
+      const authFile = path.join(authDir, "auth.json");
+      if (fs.existsSync(authFile)) {
+        const data = JSON.parse(fs.readFileSync(authFile, "utf-8"));
+        savedTokens = data.tokens;
+      }
+    } catch {
+      /* ignore */
+    }
+  } else {
+    apiKey = await fetchApiKey(client.issuer, client.client_id);
+  }
+}
+
 // Ensure the API key is available as an environment variable for legacy code
 process.env["OPENAI_API_KEY"] = apiKey;
 
-if (cli.flags.free) {
+// Only attempt credit redemption for OpenAI provider
+if (cli.flags.free && provider.toLowerCase() === "openai") {
   // eslint-disable-next-line no-console
   console.log(`${chalk.bold("codex --free")} attempting to redeem credits...`);
   if (!savedTokens?.refresh_token) {
@@ -379,13 +398,18 @@ if (!apiKey && !NO_API_KEY_REQUIRED.has(provider.toLowerCase())) {
           ? `You can create a key here: ${chalk.bold(
               chalk.underline("https://platform.openai.com/account/api-keys"),
             )}\n`
-          : provider.toLowerCase() === "gemini"
+          : provider.toLowerCase() === "azure"
             ? `You can create a ${chalk.bold(
-                `${provider.toUpperCase()}_API_KEY`,
-              )} ` + `in the ${chalk.bold(`Google AI Studio`)}.\n`
-            : `You can create a ${chalk.bold(
-                `${provider.toUpperCase()}_API_KEY`,
-              )} ` + `in the ${chalk.bold(`${provider}`)} dashboard.\n`
+                `${provider.toUpperCase()}_OPENAI_API_KEY`,
+              )} ` +
+              `in Azure AI Foundry portal at ${chalk.bold(chalk.underline("https://ai.azure.com"))}.\n`
+            : provider.toLowerCase() === "gemini"
+              ? `You can create a ${chalk.bold(
+                  `${provider.toUpperCase()}_API_KEY`,
+                )} ` + `in the ${chalk.bold(`Google AI Studio`)}.\n`
+              : `You can create a ${chalk.bold(
+                  `${provider.toUpperCase()}_API_KEY`,
+                )} ` + `in the ${chalk.bold(`${provider}`)} dashboard.\n`
       }`,
   );
   process.exit(1);