feat: auto-open model selector if user selects deprecated model (#427)

https://github.com/user-attachments/assets/d254dff6-f1f9-4492-9dfd-d185c38d3a75

Author:       Abdelrhman Kamal Mahmoud Ali Slim
Date:         2025-04-20 09:51:49 -07:00
Committed by: GitHub
Parent:       e372e4667b
Commit:       81cf47e591

3 changed files with 38 additions and 5 deletions

File: terminal-chat-response-item.tsx

@@ -1,3 +1,4 @@
+import type { OverlayModeType } from "./terminal-chat";
 import type { TerminalRendererOptions } from "marked-terminal";
 import type {
   ResponseFunctionToolCallItem,
@@ -14,18 +15,25 @@ import chalk, { type ForegroundColorName } from "chalk";
 import { Box, Text } from "ink";
 import { parse, setOptions } from "marked";
 import TerminalRenderer from "marked-terminal";
-import React, { useMemo } from "react";
+import React, { useEffect, useMemo } from "react";

 export default function TerminalChatResponseItem({
   item,
   fullStdout = false,
+  setOverlayMode,
 }: {
   item: ResponseItem;
   fullStdout?: boolean;
+  setOverlayMode?: React.Dispatch<React.SetStateAction<OverlayModeType>>;
 }): React.ReactElement {
   switch (item.type) {
     case "message":
-      return <TerminalChatResponseMessage message={item} />;
+      return (
+        <TerminalChatResponseMessage
+          setOverlayMode={setOverlayMode}
+          message={item}
+        />
+      );
     case "function_call":
       return <TerminalChatResponseToolCall message={item} />;
     case "function_call_output":
@@ -98,9 +106,23 @@ const colorsByRole: Record<string, ForegroundColorName> = {
 function TerminalChatResponseMessage({
   message,
+  setOverlayMode,
 }: {
   message: ResponseInputMessageItem | ResponseOutputMessage;
+  setOverlayMode?: React.Dispatch<React.SetStateAction<OverlayModeType>>;
 }) {
+  // auto switch to model mode if the system message contains "has been deprecated"
+  useEffect(() => {
+    if (message.role === "system") {
+      const systemMessage = message.content.find(
+        (c) => c.type === "input_text",
+      )?.text;
+      if (systemMessage?.includes("has been deprecated")) {
+        setOverlayMode?.("model");
+      }
+    }
+  }, [message, setOverlayMode]);
+
   return (
     <Box flexDirection="column">
       <Text bold color={colorsByRole[message.role] || "gray"}>
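
The effect above is the heart of the change: whenever a rendered system message contains "has been deprecated", the component asks its parent to open the model overlay. A minimal standalone sketch of that check, using simplified stand-ins for the OpenAI response-item types (the ContentPart and Message shapes and the isDeprecationNotice helper are illustrative, not part of the commit):

// Simplified stand-ins for the OpenAI response-item types (illustrative only).
type ContentPart =
  | { type: "input_text"; text: string }
  | { type: "refusal"; refusal: string };

type Message = {
  role: "system" | "user" | "assistant";
  content: Array<ContentPart>;
};

// Mirrors the useEffect body: true when a system message carries the
// "has been deprecated" notice that should open the model selector.
function isDeprecationNotice(message: Message): boolean {
  if (message.role !== "system") {
    return false;
  }
  const text = message.content.find(
    (c): c is Extract<ContentPart, { type: "input_text" }> =>
      c.type === "input_text",
  )?.text;
  return text?.includes("has been deprecated") ?? false;
}

// A message like this would make the component call setOverlayMode?.("model").
const notice: Message = {
  role: "system",
  content: [
    { type: "input_text", text: "The selected model has been deprecated." },
  ],
};
console.log(isDeprecationNotice(notice)); // true

Note that setOverlayMode is optional and invoked with optional chaining, so TerminalChatResponseItem keeps working unchanged in call sites that do not pass the setter.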

File: terminal-chat.tsx

@@ -37,6 +37,14 @@ import OpenAI from "openai";
 import React, { useEffect, useMemo, useRef, useState } from "react";
 import { inspect } from "util";

+export type OverlayModeType =
+  | "none"
+  | "history"
+  | "model"
+  | "approval"
+  | "help"
+  | "diff";
+
 type Props = {
   config: AppConfig;
   prompt?: string;
@@ -182,9 +190,7 @@ export default function TerminalChat({
     explanation,
     submitConfirmation,
   } = useConfirmation();
-  const [overlayMode, setOverlayMode] = useState<
-    "none" | "history" | "model" | "approval" | "help" | "diff"
-  >("none");
+  const [overlayMode, setOverlayMode] = useState<OverlayModeType>("none");

   // Store the diff text when opening the diff overlay so the view isn't
   // recomputed on every rerender while it is open.
@@ -461,6 +467,7 @@ export default function TerminalChat({
       <Box flexDirection="column">
         {agent ? (
           <TerminalMessageHistory
+            setOverlayMode={setOverlayMode}
             batch={lastMessageBatch}
             groupCounts={groupCounts}
             items={items}
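
Extracting the inline union into an exported OverlayModeType lets the other two files import a single definition instead of repeating six string literals, and it makes handling of the modes compiler-checked. A small sketch of that property (the overlayTitle helper and its title strings are hypothetical, not from the commit):

import type { OverlayModeType } from "./terminal-chat";

// Hypothetical helper: an exhaustive switch over the shared union.
// Adding a seventh mode to OverlayModeType makes this fail to compile
// until the new case is handled, thanks to the `never` check below.
function overlayTitle(mode: OverlayModeType): string {
  switch (mode) {
    case "none":
      return "";
    case "history":
      return "Command history";
    case "model":
      return "Select model";
    case "approval":
      return "Approval policy";
    case "help":
      return "Help";
    case "diff":
      return "Working tree diff";
    default: {
      const unreachable: never = mode;
      return unreachable;
    }
  }
}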

File: terminal-message-history.tsx

@@ -1,3 +1,4 @@
+import type { OverlayModeType } from "./terminal-chat.js";
 import type { TerminalHeaderProps } from "./terminal-header.js";
 import type { GroupedResponseItem } from "./use-message-grouping.js";
 import type { ResponseItem } from "openai/resources/responses/responses.mjs";
@@ -21,6 +22,7 @@ type MessageHistoryProps = {
   thinkingSeconds: number;
   headerProps: TerminalHeaderProps;
   fullStdout: boolean;
+  setOverlayMode: React.Dispatch<React.SetStateAction<OverlayModeType>>;
 };

 const MessageHistory: React.FC<MessageHistoryProps> = ({
@@ -30,6 +32,7 @@ const MessageHistory: React.FC<MessageHistoryProps> = ({
   loading: _loading,
   thinkingSeconds: _thinkingSeconds,
   fullStdout,
+  setOverlayMode,
 }) => {
   // Flatten batch entries to response items.
   const messages = useMemo(() => batch.map(({ item }) => item!), [batch]);
@@ -65,6 +68,7 @@ const MessageHistory: React.FC<MessageHistoryProps> = ({
         <TerminalChatResponseItem
           item={message}
           fullStdout={fullStdout}
+          setOverlayMode={setOverlayMode}
         />
       </Box>
     );
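
Taken together, the setter is threaded three levels down: TerminalChat owns the overlayMode state, TerminalMessageHistory and TerminalChatResponseItem pass setOverlayMode through, and TerminalChatResponseMessage calls it when it spots a deprecation notice. A condensed sketch of that chain, with all unrelated props elided (the declare stubs stand in for the real components and are not part of the commit):

import React, { useState } from "react";
import type { OverlayModeType } from "./terminal-chat";

// Stubs standing in for the real components; only the threaded prop is shown.
declare function TerminalMessageHistory(props: {
  setOverlayMode: React.Dispatch<React.SetStateAction<OverlayModeType>>;
}): React.ReactElement;
declare function ModelOverlay(): React.ReactElement;

function TerminalChatSketch(): React.ReactElement {
  const [overlayMode, setOverlayMode] = useState<OverlayModeType>("none");
  return (
    <>
      {/* When a rendered system message mentions "has been deprecated",
          the effect three levels down flips overlayMode to "model" and
          the parent reacts by showing the model picker. */}
      {overlayMode === "model" && <ModelOverlay />}
      <TerminalMessageHistory setOverlayMode={setOverlayMode} />
    </>
  );
}

Keeping the state in the parent and drilling the setter down, rather than introducing a context, matches how the existing overlay modes were already driven from TerminalChat.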