diff --git a/codex-cli/src/components/chat/terminal-chat.tsx b/codex-cli/src/components/chat/terminal-chat.tsx index 9092932a..cf304ad7 100644 --- a/codex-cli/src/components/chat/terminal-chat.tsx +++ b/codex-cli/src/components/chat/terminal-chat.tsx @@ -13,7 +13,7 @@ import { useTerminalSize } from "../../hooks/use-terminal-size.js"; import { AgentLoop } from "../../utils/agent/agent-loop.js"; import { ReviewDecision } from "../../utils/agent/review.js"; import { generateCompactSummary } from "../../utils/compact-summary.js"; -import { OPENAI_BASE_URL, saveConfig } from "../../utils/config.js"; +import { getBaseUrl, getApiKey, saveConfig } from "../../utils/config.js"; import { extractAppliedPatches as _extractAppliedPatches } from "../../utils/extract-applied-patches.js"; import { getGitDiff } from "../../utils/get-diff.js"; import { createInputItem } from "../../utils/input-utils.js"; @@ -65,18 +65,21 @@ const colorsByPolicy: Record = { * * @param command The command to explain * @param model The model to use for generating the explanation + * @param flexMode Whether to use the flex-mode service tier + * @param config The configuration object * @returns A human-readable explanation of what the command does */ async function generateCommandExplanation( command: Array, model: string, flexMode: boolean, + config: AppConfig, ): Promise { try { // Create a temporary OpenAI client const oai = new OpenAI({ - apiKey: process.env["OPENAI_API_KEY"], - baseURL: OPENAI_BASE_URL, + apiKey: getApiKey(config.provider), + baseURL: getBaseUrl(config.provider), }); // Format the command for display @@ -156,6 +159,7 @@ export default function TerminalChat({ items, model, Boolean(config.flexMode), + config, ); setItems([ { @@ -272,6 +276,7 @@ export default function TerminalChat({ command, model, Boolean(config.flexMode), + config, ); log(`Generated explanation: ${explanation}`); diff --git a/codex-cli/src/components/singlepass-cli-app.tsx 
b/codex-cli/src/components/singlepass-cli-app.tsx index 0c5eeb4e..56d1d913 100644 --- a/codex-cli/src/components/singlepass-cli-app.tsx +++ b/codex-cli/src/components/singlepass-cli-app.tsx @@ -5,12 +5,7 @@ import type { FileOperation } from "../utils/singlepass/file_ops"; import Spinner from "./vendor/ink-spinner"; // Third‑party / vendor components import TextInput from "./vendor/ink-text-input"; -import { - OPENAI_TIMEOUT_MS, - OPENAI_BASE_URL as _OPENAI_BASE_URL, - getBaseUrl, - getApiKey, -} from "../utils/config"; +import { OPENAI_TIMEOUT_MS, getBaseUrl, getApiKey } from "../utils/config"; import { generateDiffSummary, generateEditSummary, @@ -399,8 +394,8 @@ export function SinglePassApp({ }); const openai = new OpenAI({ - apiKey: getApiKey(config.provider ?? "openai"), - baseURL: getBaseUrl(config.provider ?? "openai"), + apiKey: getApiKey(config.provider), + baseURL: getBaseUrl(config.provider), timeout: OPENAI_TIMEOUT_MS, }); const chatResp = await openai.beta.chat.completions.parse({ diff --git a/codex-cli/src/utils/compact-summary.ts b/codex-cli/src/utils/compact-summary.ts index 040145da..82a337e3 100644 --- a/codex-cli/src/utils/compact-summary.ts +++ b/codex-cli/src/utils/compact-summary.ts @@ -1,8 +1,8 @@ +import type { AppConfig } from "./config.js"; import type { ResponseItem } from "openai/resources/responses/responses.mjs"; -import { OPENAI_BASE_URL } from "./config.js"; +import { getBaseUrl, getApiKey } from "./config.js"; import OpenAI from "openai"; - /** * Generate a condensed summary of the conversation items. 
* @param items The list of conversation items to summarize @@ -14,16 +14,18 @@ import OpenAI from "openai"; * @param items The list of conversation items to summarize * @param model The model to use for generating the summary * @param flexMode Whether to use the flex-mode service tier + * @param config The configuration object * @returns A concise structured summary string */ export async function generateCompactSummary( items: Array, model: string, flexMode = false, + config: AppConfig, ): Promise { const oai = new OpenAI({ - apiKey: process.env["OPENAI_API_KEY"], - baseURL: OPENAI_BASE_URL, + apiKey: getApiKey(config.provider), + baseURL: getBaseUrl(config.provider), }); const conversationText = items diff --git a/codex-cli/src/utils/config.ts b/codex-cli/src/utils/config.ts index a8d73feb..0c9cffa4 100644 --- a/codex-cli/src/utils/config.ts +++ b/codex-cli/src/utils/config.ts @@ -41,15 +41,26 @@ export function setApiKey(apiKey: string): void { OPENAI_API_KEY = apiKey; } -export function getBaseUrl(provider: string): string | undefined { +export function getBaseUrl(provider: string = "openai"): string | undefined { + // If the provider is `openai` and `OPENAI_BASE_URL` is set, use it + if (provider === "openai" && OPENAI_BASE_URL !== "") { + return OPENAI_BASE_URL; + } + const providerInfo = providers[provider.toLowerCase()]; if (providerInfo) { return providerInfo.baseURL; } + + // If the provider is not found in the providers list and `OPENAI_BASE_URL` is set, use it + if (OPENAI_BASE_URL !== "") { + return OPENAI_BASE_URL; + } + return undefined; } -export function getApiKey(provider: string): string | undefined { +export function getApiKey(provider: string = "openai"): string | undefined { const providerInfo = providers[provider.toLowerCase()]; if (providerInfo) { if (providerInfo.name === "Ollama") { @@ -58,6 +69,11 @@ export function getApiKey(provider: string): string | undefined { return process.env[providerInfo.envKey]; } + // If the provider is not found in 
the providers list and `OPENAI_API_KEY` is set, use it + if (OPENAI_API_KEY !== "") { + return OPENAI_API_KEY; + } + return undefined; } diff --git a/codex-cli/src/utils/model-utils.ts b/codex-cli/src/utils/model-utils.ts index 31f1afe6..774371ed 100644 --- a/codex-cli/src/utils/model-utils.ts +++ b/codex-cli/src/utils/model-utils.ts @@ -20,9 +20,11 @@ async function fetchModels(provider: string): Promise> { throw new Error("No API key configured for provider: " + provider); } - const baseURL = getBaseUrl(provider); try { - const openai = new OpenAI({ apiKey: getApiKey(provider), baseURL }); + const openai = new OpenAI({ + apiKey: getApiKey(provider), + baseURL: getBaseUrl(provider), + }); const list = await openai.models.list(); const models: Array = []; for await (const model of list as AsyncIterable<{ id?: string }>) {