feat: auto-open model selector if user selects deprecated model (#427)

https://github.com/user-attachments/assets/d254dff6-f1f9-4492-9dfd-d185c38d3a75
This commit is contained in:
Abdelrhman Kamal Mahmoud Ali Slim
2025-04-20 09:51:49 -07:00
committed by GitHub
parent e372e4667b
commit 81cf47e591
3 changed files with 38 additions and 5 deletions

View File

@@ -1,3 +1,4 @@
import type { OverlayModeType } from "./terminal-chat";
import type { TerminalRendererOptions } from "marked-terminal";
import type {
ResponseFunctionToolCallItem,
@@ -14,18 +15,25 @@ import chalk, { type ForegroundColorName } from "chalk";
import { Box, Text } from "ink";
import { parse, setOptions } from "marked";
import TerminalRenderer from "marked-terminal";
import React, { useMemo } from "react";
import React, { useEffect, useMemo } from "react";
export default function TerminalChatResponseItem({
item,
fullStdout = false,
setOverlayMode,
}: {
item: ResponseItem;
fullStdout?: boolean;
setOverlayMode?: React.Dispatch<React.SetStateAction<OverlayModeType>>;
}): React.ReactElement {
switch (item.type) {
case "message":
return <TerminalChatResponseMessage message={item} />;
return (
<TerminalChatResponseMessage
setOverlayMode={setOverlayMode}
message={item}
/>
);
case "function_call":
return <TerminalChatResponseToolCall message={item} />;
case "function_call_output":
@@ -98,9 +106,23 @@ const colorsByRole: Record<string, ForegroundColorName> = {
function TerminalChatResponseMessage({
message,
setOverlayMode,
}: {
message: ResponseInputMessageItem | ResponseOutputMessage;
setOverlayMode?: React.Dispatch<React.SetStateAction<OverlayModeType>>;
}) {
// Auto-open the model selector overlay when a system message reports that the model "has been deprecated".
useEffect(() => {
if (message.role === "system") {
const systemMessage = message.content.find(
(c) => c.type === "input_text",
)?.text;
if (systemMessage?.includes("has been deprecated")) {
setOverlayMode?.("model");
}
}
}, [message, setOverlayMode]);
return (
<Box flexDirection="column">
<Text bold color={colorsByRole[message.role] || "gray"}>

View File

@@ -37,6 +37,14 @@ import OpenAI from "openai";
import React, { useEffect, useMemo, useRef, useState } from "react";
import { inspect } from "util";
/** Which full-screen overlay the terminal chat UI is currently showing ("none" when no overlay is open). */
export type OverlayModeType = "none" | "history" | "model" | "approval" | "help" | "diff";
type Props = {
config: AppConfig;
prompt?: string;
@@ -182,9 +190,7 @@ export default function TerminalChat({
explanation,
submitConfirmation,
} = useConfirmation();
const [overlayMode, setOverlayMode] = useState<
"none" | "history" | "model" | "approval" | "help" | "diff"
>("none");
const [overlayMode, setOverlayMode] = useState<OverlayModeType>("none");
// Store the diff text when opening the diff overlay so the view isn't
// recomputed on every rerender while it is open.
@@ -461,6 +467,7 @@ export default function TerminalChat({
<Box flexDirection="column">
{agent ? (
<TerminalMessageHistory
setOverlayMode={setOverlayMode}
batch={lastMessageBatch}
groupCounts={groupCounts}
items={items}

View File

@@ -1,3 +1,4 @@
import type { OverlayModeType } from "./terminal-chat.js";
import type { TerminalHeaderProps } from "./terminal-header.js";
import type { GroupedResponseItem } from "./use-message-grouping.js";
import type { ResponseItem } from "openai/resources/responses/responses.mjs";
@@ -21,6 +22,7 @@ type MessageHistoryProps = {
thinkingSeconds: number;
headerProps: TerminalHeaderProps;
fullStdout: boolean;
setOverlayMode: React.Dispatch<React.SetStateAction<OverlayModeType>>;
};
const MessageHistory: React.FC<MessageHistoryProps> = ({
@@ -30,6 +32,7 @@ const MessageHistory: React.FC<MessageHistoryProps> = ({
loading: _loading,
thinkingSeconds: _thinkingSeconds,
fullStdout,
setOverlayMode,
}) => {
// Flatten batch entries to response items.
const messages = useMemo(() => batch.map(({ item }) => item!), [batch]);
@@ -65,6 +68,7 @@ const MessageHistory: React.FC<MessageHistoryProps> = ({
<TerminalChatResponseItem
item={message}
fullStdout={fullStdout}
setOverlayMode={setOverlayMode}
/>
</Box>
);