feat: support multiple providers via Responses-Completion transformation (#247)

https://github.com/user-attachments/assets/9ecb51be-fa65-4e99-8512-abb898dda569

Implemented as a transformation between the Responses API and the Chat
Completions API, so it works with existing providers that already expose a
Completions-compatible endpoint while keeping the changes to the codex repo
to a minimum.
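
As a rough illustration of the idea (a minimal sketch, not the code in this diff; the type
and helper names below are assumptions): Responses-style input items are flattened into chat
messages, sent to any Chat Completions-compatible endpoint, and the reply is wrapped back
into a Responses-style output item.

```ts
// Sketch only: how a Responses-style request could be served by a Chat
// Completions endpoint. Names are illustrative, not the codex implementation.
import OpenAI from "openai";
import type { ChatCompletionMessageParam } from "openai/resources/chat/completions";

type ResponseInputItem = {
  type: "message";
  role: "system" | "user" | "assistant";
  content: Array<{ type: string; text: string }>;
};

function toChatMessages(
  items: Array<ResponseInputItem>,
): Array<ChatCompletionMessageParam> {
  return items.map((item): ChatCompletionMessageParam => {
    // Flatten each Responses "message" item into a single chat message.
    const content = item.content.map((part) => part.text).join("");
    if (item.role === "system") {
      return { role: "system", content };
    }
    if (item.role === "assistant") {
      return { role: "assistant", content };
    }
    return { role: "user", content };
  });
}

async function completeViaChat(
  client: OpenAI,
  model: string,
  items: Array<ResponseInputItem>,
) {
  const completion = await client.chat.completions.create({
    model,
    messages: toChatMessages(items),
  });
  // Wrap the chat reply back into a Responses-style output item.
  return {
    type: "message" as const,
    role: "assistant" as const,
    content: [
      {
        type: "output_text",
        text: completion.choices[0]?.message?.content ?? "",
      },
    ],
  };
}
```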

---------

Co-authored-by: Thibault Sottiaux <tibo@openai.com>
Co-authored-by: Fouad Matin <169186268+fouad-openai@users.noreply.github.com>
Co-authored-by: Fouad Matin <fouad@openai.com>
Daniel Nakov
2025-04-20 23:59:34 -04:00
committed by GitHub
parent 693bd59ecc
commit eafbc75612
11 changed files with 1870 additions and 83 deletions

View File

@@ -18,7 +18,7 @@ import { AgentLoop } from "../../utils/agent/agent-loop.js";
import { log } from "../../utils/agent/log.js";
import { ReviewDecision } from "../../utils/agent/review.js";
import { generateCompactSummary } from "../../utils/compact-summary.js";
import { OPENAI_BASE_URL } from "../../utils/config.js";
import { OPENAI_BASE_URL, saveConfig } from "../../utils/config.js";
import { extractAppliedPatches as _extractAppliedPatches } from "../../utils/extract-applied-patches.js";
import { getGitDiff } from "../../utils/get-diff.js";
import { createInputItem } from "../../utils/input-utils.js";
@@ -144,6 +144,7 @@ export default function TerminalChat({
// Desktop notification setting
const notify = config.notify;
const [model, setModel] = useState<string>(config.model);
const [provider, setProvider] = useState<string>(config.provider || "openai");
const [lastResponseId, setLastResponseId] = useState<string | null>(null);
const [items, setItems] = useState<Array<ResponseItem>>([]);
const [loading, setLoading] = useState<boolean>(false);
@@ -228,7 +229,7 @@ export default function TerminalChat({
log("creating NEW AgentLoop");
log(
`model=${model} instructions=${Boolean(
`model=${model} provider=${provider} instructions=${Boolean(
config.instructions,
)} approvalPolicy=${approvalPolicy}`,
);
@@ -238,6 +239,7 @@ export default function TerminalChat({
agentRef.current = new AgentLoop({
model,
provider,
config,
instructions: config.instructions,
approvalPolicy,
@@ -307,10 +309,15 @@ export default function TerminalChat({
agentRef.current = undefined;
forceUpdate(); // rerender after teardown too
};
// We intentionally omit 'approvalPolicy' and 'confirmationPrompt' from the deps
// so switching modes or showing confirmation dialogs doesn't tear down the loop.
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [model, config, requestConfirmation, additionalWritableRoots]);
}, [
model,
provider,
config,
approvalPolicy,
confirmationPrompt,
requestConfirmation,
additionalWritableRoots,
]);
// whenever loading starts/stops, reset or start a timer — but pause the
// timer while a confirmation overlay is displayed so we don't trigger a
@@ -417,7 +424,7 @@ export default function TerminalChat({
// ────────────────────────────────────────────────────────────────
useEffect(() => {
(async () => {
const available = await getAvailableModels();
const available = await getAvailableModels(provider);
if (model && available.length > 0 && !available.includes(model)) {
setItems((prev) => [
...prev,
@@ -428,7 +435,7 @@ export default function TerminalChat({
content: [
{
type: "input_text",
text: `Warning: model "${model}" is not in the list of available models returned by OpenAI.`,
text: `Warning: model "${model}" is not in the list of available models for provider "${provider}".`,
},
],
},
@@ -470,6 +477,7 @@ export default function TerminalChat({
version: CLI_VERSION,
PWD,
model,
provider,
approvalPolicy,
colorsByPolicy,
agent,
@@ -566,6 +574,7 @@ export default function TerminalChat({
{overlayMode === "model" && (
<ModelOverlay
currentModel={model}
currentProvider={provider}
hasLastResponse={Boolean(lastResponseId)}
onSelect={(newModel) => {
log(
@@ -582,6 +591,13 @@ export default function TerminalChat({
prev && newModel !== model ? null : prev,
);
// Save model to config
saveConfig({
...config,
model: newModel,
provider: provider,
});
setItems((prev) => [
...prev,
{
@@ -599,6 +615,51 @@ export default function TerminalChat({
setOverlayMode("none");
}}
onSelectProvider={(newProvider) => {
log(
"TerminalChat: interruptAgent invoked calling agent.cancel()",
);
if (!agent) {
log("TerminalChat: agent is not ready yet");
}
agent?.cancel();
setLoading(false);
// Keep the current model until the user picks one for the new provider
const defaultModel = model;
// Save provider to config
const updatedConfig = {
...config,
provider: newProvider,
model: defaultModel,
};
saveConfig(updatedConfig);
setProvider(newProvider);
setModel(defaultModel);
setLastResponseId((prev) =>
prev && newProvider !== provider ? null : prev,
);
setItems((prev) => [
...prev,
{
id: `switch-provider-${Date.now()}`,
type: "message",
role: "system",
content: [
{
type: "input_text",
text: `Switched provider to ${newProvider} with model ${defaultModel}`,
},
],
},
]);
// Don't close the overlay so user can select a model for the new provider
// setOverlayMode("none");
}}
onExit={() => setOverlayMode("none")}
/>
)}
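
The hunk above switches `getAvailableModels()` to `getAvailableModels(provider)`. The updated
`model-utils.ts` is not shown in this view; a minimal sketch of what a provider-aware version
could look like, assuming the `getBaseUrl`/`getApiKey` helpers imported elsewhere in this commit:

```ts
// Sketch only - not the actual model-utils.ts from this commit.
import OpenAI from "openai";
import { getApiKey, getBaseUrl } from "./config";

export async function getAvailableModels(
  provider: string = "openai",
): Promise<Array<string>> {
  try {
    const client = new OpenAI({
      apiKey: getApiKey(provider),
      baseURL: getBaseUrl(provider),
    });
    const models: Array<string> = [];
    // The OpenAI SDK paginates /models; async iteration walks every page.
    for await (const model of client.models.list()) {
      models.push(model.id);
    }
    return models.sort();
  } catch {
    // Some Completions-compatible providers do not expose /models at all.
    return [];
  }
}
```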

View File

@@ -9,6 +9,7 @@ export interface TerminalHeaderProps {
version: string;
PWD: string;
model: string;
provider?: string;
approvalPolicy: string;
colorsByPolicy: Record<string, string | undefined>;
agent?: AgentLoop;
@@ -21,6 +22,7 @@ const TerminalHeader: React.FC<TerminalHeaderProps> = ({
version,
PWD,
model,
provider = "openai",
approvalPolicy,
colorsByPolicy,
agent,
@@ -32,7 +34,7 @@ const TerminalHeader: React.FC<TerminalHeaderProps> = ({
{terminalRows < 10 ? (
// Compact header for small terminal windows
<Text>
Codex v{version} {PWD} {model} {" "}
Codex v{version} {PWD} {model} ({provider}) {" "}
<Text color={colorsByPolicy[approvalPolicy]}>{approvalPolicy}</Text>
{flexModeEnabled ? " flex-mode" : ""}
</Text>
@@ -65,6 +67,10 @@ const TerminalHeader: React.FC<TerminalHeaderProps> = ({
<Text dimColor>
<Text color="blueBright"></Text> model: <Text bold>{model}</Text>
</Text>
<Text dimColor>
<Text color="blueBright"></Text> provider:{" "}
<Text bold>{provider}</Text>
</Text>
<Text dimColor>
<Text color="blueBright"></Text> approval:{" "}
<Text bold color={colorsByPolicy[approvalPolicy]} dimColor>

View File

@@ -1,8 +1,9 @@
import TypeaheadOverlay from "./typeahead-overlay.js";
import {
getAvailableModels,
RECOMMENDED_MODELS,
RECOMMENDED_MODELS as _RECOMMENDED_MODELS,
} from "../utils/model-utils.js";
import { providers } from "../utils/providers.js";
import { Box, Text, useInput } from "ink";
import React, { useEffect, useState } from "react";
@@ -16,39 +17,51 @@ import React, { useEffect, useState } from "react";
*/
type Props = {
currentModel: string;
currentProvider?: string;
hasLastResponse: boolean;
onSelect: (model: string) => void;
onSelectProvider?: (provider: string) => void;
onExit: () => void;
};
export default function ModelOverlay({
currentModel,
currentProvider = "openai",
hasLastResponse,
onSelect,
onSelectProvider,
onExit,
}: Props): JSX.Element {
const [items, setItems] = useState<Array<{ label: string; value: string }>>(
[],
);
const [providerItems, _setProviderItems] = useState<
Array<{ label: string; value: string }>
>(Object.values(providers).map((p) => ({ label: p.name, value: p.name })));
const [mode, setMode] = useState<"model" | "provider">("model");
const [isLoading, setIsLoading] = useState<boolean>(true);
// This effect will run when the provider changes to update the model list
useEffect(() => {
setIsLoading(true);
(async () => {
const models = await getAvailableModels();
// Split the list into recommended and “other” models.
const recommended = RECOMMENDED_MODELS.filter((m) => models.includes(m));
const others = models.filter((m) => !recommended.includes(m));
const ordered = [...recommended, ...others.sort()];
setItems(
ordered.map((m) => ({
label: recommended.includes(m) ? `${m}` : m,
value: m,
})),
);
try {
const models = await getAvailableModels(currentProvider);
// Convert the models to the format needed by TypeaheadOverlay
setItems(
models.map((m) => ({
label: m,
value: m,
})),
);
} catch (error) {
// Ignore errors while fetching models; the overlay simply shows an empty list.
} finally {
setIsLoading(false);
}
})();
}, []);
}, [currentProvider]);
// ---------------------------------------------------------------------------
// If the conversation already contains a response we cannot change the model
@@ -58,10 +71,14 @@ export default function ModelOverlay({
// available action is to dismiss the overlay (Esc or Enter).
// ---------------------------------------------------------------------------
// Always register input handling so hooks are called consistently.
// Register input handling for switching between model and provider selection
useInput((_input, key) => {
if (hasLastResponse && (key.escape || key.return)) {
onExit();
} else if (!hasLastResponse) {
if (key.tab) {
setMode(mode === "model" ? "provider" : "model");
}
}
});
@@ -91,13 +108,47 @@ export default function ModelOverlay({
);
}
if (mode === "provider") {
return (
<TypeaheadOverlay
title="Select provider"
description={
<Box flexDirection="column">
<Text>
Current provider:{" "}
<Text color="greenBright">{currentProvider}</Text>
</Text>
<Text dimColor>press tab to switch to model selection</Text>
</Box>
}
initialItems={providerItems}
currentValue={currentProvider}
onSelect={(provider) => {
if (onSelectProvider) {
onSelectProvider(provider);
// Immediately switch to model selection so user can pick a model for the new provider
setMode("model");
}
}}
onExit={onExit}
/>
);
}
return (
<TypeaheadOverlay
title="Switch model"
title="Select model"
description={
<Text>
Current model: <Text color="greenBright">{currentModel}</Text>
</Text>
<Box flexDirection="column">
<Text>
Current model: <Text color="greenBright">{currentModel}</Text>
</Text>
<Text>
Current provider: <Text color="greenBright">{currentProvider}</Text>
</Text>
{isLoading && <Text color="yellow">Loading models...</Text>}
<Text dimColor>press tab to switch to provider selection</Text>
</Box>
}
initialItems={items}
currentValue={currentModel}
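
`ModelOverlay` builds its provider list from the `providers` map imported from
`../utils/providers.js`, a new file whose contents are not shown in this view. A plausible
shape for that registry (the field names and entries here are an assumption):

```ts
// Sketch of the provider registry shape consumed by ModelOverlay.
// The entries and field names are illustrative, not the committed file.
export type ProviderInfo = {
  name: string; // label shown in the provider overlay
  baseURL: string; // Chat Completions-compatible endpoint
  envKey: string; // environment variable that holds the API key
};

export const providers: Record<string, ProviderInfo> = {
  openai: {
    name: "OpenAI",
    baseURL: "https://api.openai.com/v1",
    envKey: "OPENAI_API_KEY",
  },
  // Additional Completions-compatible providers would be registered here,
  // each with its own base URL and API-key environment variable.
};
```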

View File

@@ -5,7 +5,12 @@ import type { FileOperation } from "../utils/singlepass/file_ops";
import Spinner from "./vendor/ink-spinner"; // Third-party / vendor components
import TextInput from "./vendor/ink-text-input";
import { OPENAI_TIMEOUT_MS, OPENAI_BASE_URL } from "../utils/config";
import {
OPENAI_TIMEOUT_MS,
OPENAI_BASE_URL as _OPENAI_BASE_URL,
getBaseUrl,
getApiKey,
} from "../utils/config";
import {
generateDiffSummary,
generateEditSummary,
@@ -394,8 +399,8 @@ export function SinglePassApp({
});
const openai = new OpenAI({
apiKey: config.apiKey ?? "",
baseURL: OPENAI_BASE_URL || undefined,
apiKey: getApiKey(config.provider),
baseURL: getBaseUrl(config.provider),
timeout: OPENAI_TIMEOUT_MS,
});
const chatResp = await openai.beta.chat.completions.parse({
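
The `getBaseUrl` and `getApiKey` helpers imported above replace the fixed
`OPENAI_BASE_URL`/API-key pair with per-provider lookups. Their implementation lives in
`config.ts`, which is not shown here; a minimal sketch, assuming the provider registry
outlined earlier:

```ts
// Sketch only - an assumed shape for the per-provider config helpers.
import { providers } from "./providers";

export function getBaseUrl(provider: string = "openai"): string | undefined {
  // An explicit OPENAI_BASE_URL override wins; otherwise use the registry.
  if (provider === "openai" && process.env["OPENAI_BASE_URL"]) {
    return process.env["OPENAI_BASE_URL"];
  }
  return providers[provider]?.baseURL;
}

export function getApiKey(provider: string = "openai"): string | undefined {
  // Each provider reads its key from its own environment variable.
  const envKey = providers[provider]?.envKey ?? "OPENAI_API_KEY";
  return process.env[envKey];
}
```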