diff --git a/codex-cli/src/components/chat/terminal-chat.tsx b/codex-cli/src/components/chat/terminal-chat.tsx index cf304ad7..6a1cfe1b 100644 --- a/codex-cli/src/components/chat/terminal-chat.tsx +++ b/codex-cli/src/components/chat/terminal-chat.tsx @@ -572,6 +572,7 @@ export default function TerminalChat({ {overlayMode === "model" && ( { diff --git a/codex-cli/src/components/model-overlay.tsx b/codex-cli/src/components/model-overlay.tsx index ec5e40d5..c9dde0e6 100644 --- a/codex-cli/src/components/model-overlay.tsx +++ b/codex-cli/src/components/model-overlay.tsx @@ -3,7 +3,6 @@ import { getAvailableModels, RECOMMENDED_MODELS as _RECOMMENDED_MODELS, } from "../utils/model-utils.js"; -import { providers } from "../utils/providers.js"; import { Box, Text, useInput } from "ink"; import React, { useEffect, useState } from "react"; @@ -19,6 +18,7 @@ type Props = { currentModel: string; currentProvider?: string; hasLastResponse: boolean; + providers?: Record<string, { name: string; baseURL: string; envKey: string }>; onSelect: (model: string) => void; onSelectProvider?: (provider: string) => void; onExit: () => void; @@ -26,6 +26,7 @@ type Props = { export default function ModelOverlay({ currentModel, + providers = {}, currentProvider = "openai", hasLastResponse, onSelect, diff --git a/codex-cli/src/utils/config.ts b/codex-cli/src/utils/config.ts index 91b35956..1df2e197 100644 --- a/codex-cli/src/utils/config.ts +++ b/codex-cli/src/utils/config.ts @@ -42,33 +42,33 @@ export function setApiKey(apiKey: string): void { } export function getBaseUrl(provider: string = "openai"): string | undefined { - // If the provider is `openai` and `OPENAI_BASE_URL` is set, use it - if (provider === "openai" && OPENAI_BASE_URL !== "") { - return OPENAI_BASE_URL; - } - - // Check for a PROVIDER-specific override: e.g. OLLAMA_BASE_URL + // Check for a PROVIDER-specific override: e.g. OPENAI_BASE_URL or OLLAMA_BASE_URL.
const envKey = `${provider.toUpperCase()}_BASE_URL`; if (process.env[envKey]) { return process.env[envKey]; } - // Use the default URL from providers if available - const providerInfo = providers[provider.toLowerCase()]; + // Get providers config from config file. + const config = loadConfig(); + const providersConfig = config.providers ?? providers; + const providerInfo = providersConfig[provider.toLowerCase()]; if (providerInfo) { return providerInfo.baseURL; } - // If the provider not found in the providers list and `OPENAI_BASE_URL` is set, use it + // If the provider not found in the providers list and `OPENAI_BASE_URL` is set, use it. if (OPENAI_BASE_URL !== "") { return OPENAI_BASE_URL; } + // We tried. return undefined; } export function getApiKey(provider: string = "openai"): string | undefined { - const providerInfo = providers[provider.toLowerCase()]; + const config = loadConfig(); + const providersConfig = config.providers ?? providers; + const providerInfo = providersConfig[provider.toLowerCase()]; if (providerInfo) { if (providerInfo.name === "Ollama") { return process.env[providerInfo.envKey] ?? "dummy"; @@ -81,12 +81,10 @@ export function getApiKey(provider: string = "openai"): string | undefined { return OPENAI_API_KEY; } + // We tried. return undefined; } -// Formatting (quiet mode-only). -export const PRETTY_PRINT = Boolean(process.env["PRETTY_PRINT"] || ""); - // Represents config as persisted in config.json. 
export type StoredConfig = { model?: string; @@ -98,6 +96,7 @@ export type StoredConfig = { notify?: boolean; /** Disable server-side response storage (send full transcript each request) */ disableResponseStorage?: boolean; + providers?: Record<string, { name: string; baseURL: string; envKey: string }>; history?: { maxSize?: number; saveHistory?: boolean; @@ -134,6 +133,7 @@ export type AppConfig = { /** Enable the "flex-mode" processing mode for supported models (o3, o4-mini) */ flexMode?: boolean; + providers?: Record<string, { name: string; baseURL: string; envKey: string }>; history?: { maxSize: number; saveHistory: boolean; }; }; +// Formatting (quiet mode-only). +export const PRETTY_PRINT = Boolean(process.env["PRETTY_PRINT"] || ""); + // --------------------------------------------------------------------------- // Project doc support (codex.md) // --------------------------------------------------------------------------- @@ -399,6 +402,9 @@ export const loadConfig = ( }; } + // Merge default providers with user configured providers in the config. + config.providers = { ...providers, ...storedConfig.providers }; + return config; }; @@ -431,6 +437,7 @@ export const saveConfig = ( const configToSave: StoredConfig = { model: config.model, provider: config.provider, + providers: config.providers, approvalMode: config.approvalMode, }; diff --git a/codex-cli/tests/config.test.tsx b/codex-cli/tests/config.test.tsx index 867b957d..e94b5b29 100644 --- a/codex-cli/tests/config.test.tsx +++ b/codex-cli/tests/config.test.tsx @@ -5,6 +5,7 @@ import { AutoApprovalMode } from "../src/utils/auto-approval-mode.js"; import { tmpdir } from "os"; import { join } from "path"; import { test, expect, beforeEach, afterEach, vi } from "vitest"; +import { providers as defaultProviders } from "../src/utils/providers"; // In‑memory FS store let memfs: Record<string, string> = {}; @@ -148,3 +149,88 @@ test("loads and saves approvalMode correctly", () => { }); expect(reloadedConfig.approvalMode).toBe(AutoApprovalMode.FULL_AUTO); }); + +test("loads and saves providers 
correctly", () => { + // Setup custom providers configuration + const customProviders = { + openai: { + name: "Custom OpenAI", + baseURL: "https://custom-api.openai.com/v1", + envKey: "CUSTOM_OPENAI_API_KEY", + }, + anthropic: { + name: "Anthropic", + baseURL: "https://api.anthropic.com", + envKey: "ANTHROPIC_API_KEY", + }, + }; + + // Create config with providers + const testConfig = { + model: "test-model", + provider: "anthropic", + providers: customProviders, + instructions: "test instructions", + notify: false, + }; + + // Save the config + saveConfig(testConfig, testConfigPath, testInstructionsPath); + + // Verify saved config contains providers + expect(memfs[testConfigPath]).toContain(`"providers"`); + expect(memfs[testConfigPath]).toContain(`"Custom OpenAI"`); + expect(memfs[testConfigPath]).toContain(`"Anthropic"`); + expect(memfs[testConfigPath]).toContain(`"provider": "anthropic"`); + + // Load config and verify providers were loaded correctly + const loadedConfig = loadConfig(testConfigPath, testInstructionsPath, { + disableProjectDoc: true, + }); + + // Check providers were loaded correctly + expect(loadedConfig.provider).toBe("anthropic"); + expect(loadedConfig.providers).toEqual({ + ...defaultProviders, + ...customProviders, + }); + + // Test merging with built-in providers + // Create a config with only one custom provider + const partialProviders = { + customProvider: { + name: "Custom Provider", + baseURL: "https://custom-api.example.com", + envKey: "CUSTOM_API_KEY", + }, + }; + + const partialConfig = { + model: "test-model", + provider: "customProvider", + providers: partialProviders, + instructions: "test instructions", + notify: false, + }; + + // Save the partial config + saveConfig(partialConfig, testConfigPath, testInstructionsPath); + + // Load config and verify providers were merged with built-in providers + const mergedConfig = loadConfig(testConfigPath, testInstructionsPath, { + disableProjectDoc: true, + }); + + // Check providers is 
defined + expect(mergedConfig.providers).toBeDefined(); + + // Use bracket notation to access properties + if (mergedConfig.providers) { + expect(mergedConfig.providers["customProvider"]).toBeDefined(); + expect(mergedConfig.providers["customProvider"]).toEqual( + partialProviders.customProvider, + ); + // Built-in providers should still be there (like openai) + expect(mergedConfig.providers["openai"]).toBeDefined(); + } +});