import { OPENAI_API_KEY } from "./config";

import OpenAI from "openai";

const MODEL_LIST_TIMEOUT_MS = 2_000; // 2 seconds
export const RECOMMENDED_MODELS: Array<string> = ["o4-mini", "o3"];

/**
 * Background model loader / cache.
 *
 * We start fetching the list of available models from OpenAI once the CLI
 * enters interactive mode. The request is made exactly once during the
 * lifetime of the process and the results are cached for subsequent calls.
 */

let modelsPromise: Promise<Array<string>> | null = null;

async function fetchModels(): Promise<Array<string>> {
  // If the user has not configured an API key we cannot hit the network.
  if (!OPENAI_API_KEY) {
    return RECOMMENDED_MODELS;
  }

  try {
    const openai = new OpenAI({ apiKey: OPENAI_API_KEY });
    const list = await openai.models.list();

    const models: Array<string> = [];
    for await (const model of list as AsyncIterable<{ id?: string }>) {
      if (model && typeof model.id === "string") {
        models.push(model.id);
      }
    }

    return models.sort();
  } catch {
    return [];
  }
}

export function preloadModels(): void {
  if (!modelsPromise) {
    // Fire‑and‑forget – callers that truly need the list should `await`
    // `getAvailableModels()` instead.
    void getAvailableModels();
  }
}

export async function getAvailableModels(): Promise<Array<string>> {
  if (!modelsPromise) {
    modelsPromise = fetchModels();
  }

  return modelsPromise;
}

/**
 * Verify that the provided model identifier is present in the set returned by
 * {@link getAvailableModels}. The list of models is fetched from the OpenAI
 * `/models` endpoint the first time it is required and then cached in‑process.
 */
export async function isModelSupportedForResponses(
  model: string | undefined | null,
): Promise<boolean> {
  if (
    typeof model !== "string" ||
    model.trim() === "" ||
    RECOMMENDED_MODELS.includes(model)
  ) {
    return true;
  }

  try {
    const models = await Promise.race<Array<string>>([
      getAvailableModels(),
      new Promise<Array<string>>((resolve) =>
        setTimeout(() => resolve([]), MODEL_LIST_TIMEOUT_MS),
      ),
    ]);

    // If the timeout fired we get an empty list → treat as supported to avoid
    // false negatives.
    if (models.length === 0) {
      return true;
    }

    return models.includes(model.trim());
  } catch {
    // Network or library failure → don't block start‑up.
    return true;
  }
}
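
/*
 * Usage sketch (illustrative only, not part of this module's API): a CLI
 * entry point would typically call `preloadModels()` as soon as interactive
 * mode starts, so the network round-trip overlaps with start-up, and later
 * await `isModelSupportedForResponses()` before sending the first request.
 * The `startInteractiveSession` name below is hypothetical.
 *
 *   async function startInteractiveSession(model: string): Promise<void> {
 *     preloadModels(); // fire-and-forget; warms the in-process cache
 *     if (!(await isModelSupportedForResponses(model))) {
 *       console.warn(
 *         `Unknown model "${model}"; known models include: ${RECOMMENDED_MODELS.join(", ")}`,
 *       );
 *     }
 *   }
 */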