diff --git a/codex-cli/src/components/chat/terminal-chat-input.tsx b/codex-cli/src/components/chat/terminal-chat-input.tsx
index 8fd0c63a..1a659064 100644
--- a/codex-cli/src/components/chat/terminal-chat-input.tsx
+++ b/codex-cli/src/components/chat/terminal-chat-input.tsx
@@ -42,6 +42,7 @@ export default function TerminalChatInput({
openModelOverlay,
openApprovalOverlay,
openHelpOverlay,
+ onCompact,
interruptAgent,
active,
}: {
@@ -61,6 +62,7 @@ export default function TerminalChatInput({
openModelOverlay: () => void;
openApprovalOverlay: () => void;
openHelpOverlay: () => void;
+ onCompact: () => void;
interruptAgent: () => void;
active: boolean;
}): React.ReactElement {
@@ -166,6 +168,12 @@ export default function TerminalChatInput({
return;
}
+ if (inputValue === "/compact") {
+ setInput("");
+ onCompact();
+ return;
+ }
+
if (inputValue.startsWith("/model")) {
setInput("");
openModelOverlay();
@@ -295,6 +303,7 @@ export default function TerminalChatInput({
openModelOverlay,
openHelpOverlay,
history, // Add history to the dependency array
+ onCompact,
],
);
@@ -366,7 +375,8 @@ export default function TerminalChatInput({
<>
{" — "}
- {Math.round(contextLeftPercent)}% context left
+ {Math.round(contextLeftPercent)}% context left — send
+ "/compact" to condense context
</>
)}
diff --git a/codex-cli/src/components/chat/terminal-chat.tsx b/codex-cli/src/components/chat/terminal-chat.tsx
index 7885f1f6..50ee4479 100644
--- a/codex-cli/src/components/chat/terminal-chat.tsx
+++ b/codex-cli/src/components/chat/terminal-chat.tsx
@@ -17,6 +17,7 @@ import { useTerminalSize } from "../../hooks/use-terminal-size.js";
import { AgentLoop } from "../../utils/agent/agent-loop.js";
import { isLoggingEnabled, log } from "../../utils/agent/log.js";
import { ReviewDecision } from "../../utils/agent/review.js";
+import { generateCompactSummary } from "../../utils/compact-summary.js";
import { OPENAI_BASE_URL } from "../../utils/config.js";
import { createInputItem } from "../../utils/input-utils.js";
import { getAvailableModels } from "../../utils/model-utils.js";
@@ -138,6 +139,45 @@ export default function TerminalChat({
     initialApprovalPolicy,
   );
   const [thinkingSeconds, setThinkingSeconds] = useState(0);
+
+  /**
+   * Replace the conversation with a condensed summary so the user can
+   * continue in a mostly fresh context window.
+   *
+   * On failure the existing items are kept and a system message describing
+   * the error is appended instead.
+   */
+  const handleCompact = async () => {
+    setLoading(true);
+    try {
+      const summary = await generateCompactSummary(items, model);
+      // Drop the old transcript entirely; the summary becomes the sole item.
+      setItems([
+        {
+          id: `compact-${Date.now()}`,
+          type: "message",
+          role: "assistant",
+          content: [{ type: "output_text", text: summary }],
+        } as ResponseItem,
+      ]);
+    } catch (err) {
+      // `err` is `unknown` under strict mode — narrow before formatting so we
+      // don't emit "[object Object]" for non-Error throwables.
+      const message = err instanceof Error ? err.message : String(err);
+      setItems((prev) => [
+        ...prev,
+        {
+          id: `compact-error-${Date.now()}`,
+          type: "message",
+          role: "system",
+          content: [
+            { type: "input_text", text: `Failed to compact context: ${message}` },
+          ],
+        } as ResponseItem,
+      ]);
+    } finally {
+      setLoading(false);
+    }
+  };
const {
requestConfirmation,
confirmationPrompt,
@@ -453,6 +482,7 @@ export default function TerminalChat({
openModelOverlay={() => setOverlayMode("model")}
openApprovalOverlay={() => setOverlayMode("approval")}
openHelpOverlay={() => setOverlayMode("help")}
+ onCompact={handleCompact}
active={overlayMode === "none"}
interruptAgent={() => {
if (!agent) {
diff --git a/codex-cli/src/components/help-overlay.tsx b/codex-cli/src/components/help-overlay.tsx
index 023fa202..9feabc16 100644
--- a/codex-cli/src/components/help-overlay.tsx
+++ b/codex-cli/src/components/help-overlay.tsx
@@ -52,6 +52,9 @@ export default function HelpOverlay({
/clearhistory – clear command history
+
+ /compact – condense context into a summary
+
diff --git a/codex-cli/src/utils/compact-summary.ts b/codex-cli/src/utils/compact-summary.ts
new file mode 100644
index 00000000..81474396
--- /dev/null
+++ b/codex-cli/src/utils/compact-summary.ts
@@ -0,0 +1,64 @@
+import type { ResponseItem } from "openai/resources/responses/responses.mjs";
+
+import { OPENAI_BASE_URL } from "./config.js";
+import OpenAI from "openai";
+
+/**
+ * Generate a condensed summary of the conversation items.
+ *
+ * @param items The list of conversation items to summarize
+ * @param model The model to use for generating the summary
+ * @returns A concise structured summary string
+ */
+export async function generateCompactSummary(
+  items: Array<ResponseItem>,
+  model: string,
+): Promise<string> {
+  const oai = new OpenAI({
+    apiKey: process.env["OPENAI_API_KEY"],
+    baseURL: OPENAI_BASE_URL,
+  });
+
+  // Flatten user/assistant messages into a plain "role: text" transcript.
+  const conversationText = items
+    .filter(
+      (
+        item,
+      ): item is ResponseItem & { content: Array<unknown>; role: string } =>
+        item.type === "message" &&
+        (item.role === "user" || item.role === "assistant") &&
+        Array.isArray(item.content),
+    )
+    .map((item) => {
+      // Keep only content parts that carry a string `text` payload.
+      const text = item.content
+        .filter(
+          (part): part is { text: string } =>
+            typeof part === "object" &&
+            part != null &&
+            "text" in part &&
+            typeof (part as { text: unknown }).text === "string",
+        )
+        .map((part) => part.text)
+        .join("");
+      return `${item.role}: ${text}`;
+    })
+    .join("\n");
+
+  const response = await oai.chat.completions.create({
+    model,
+    messages: [
+      {
+        // These are instructions to the model, so they belong in the system
+        // role — not "assistant", which would read as prior model output.
+        role: "system",
+        content:
+          "You are an expert coding assistant. Your goal is to generate a concise, structured summary of the conversation below that captures all essential information needed to continue development after context replacement. Include tasks performed, code areas modified or reviewed, key decisions or assumptions, test results or errors, and outstanding tasks or next steps.",
+      },
+      {
+        role: "user",
+        content: `Here is the conversation so far:\n${conversationText}\n\nPlease summarize this conversation, covering:\n1. Tasks performed and outcomes\n2. Code files, modules, or functions modified or examined\n3. Important decisions or assumptions made\n4. Errors encountered and test or build results\n5. Remaining tasks, open questions, or next steps\nProvide the summary in a clear, concise format.`,
+      },
+    ],
+  });
+  return response.choices[0]?.message.content ?? "Unable to generate summary.";
+}
diff --git a/codex-cli/tests/terminal-chat-input-compact.test.tsx b/codex-cli/tests/terminal-chat-input-compact.test.tsx
new file mode 100644
index 00000000..194a61ca
--- /dev/null
+++ b/codex-cli/tests/terminal-chat-input-compact.test.tsx
@@ -0,0 +1,32 @@
+import React from "react";
+import type { ComponentProps } from "react";
+import { renderTui } from "./ui-test-helpers.js";
+import TerminalChatInput from "../src/components/chat/terminal-chat-input.js";
+import { describe, it, expect } from "vitest";
+
+describe("TerminalChatInput compact command", () => {
+  it("shows /compact hint when context is low", async () => {
+    // Minimal prop set; only `contextLeftPercent` matters for the hint.
+    const props: ComponentProps<typeof TerminalChatInput> = {
+      isNew: false,
+      loading: false,
+      submitInput: () => {},
+      confirmationPrompt: null,
+      explanation: undefined,
+      submitConfirmation: () => {},
+      setLastResponseId: () => {},
+      setItems: () => {},
+      contextLeftPercent: 10,
+      openOverlay: () => {},
+      openModelOverlay: () => {},
+      openApprovalOverlay: () => {},
+      openHelpOverlay: () => {},
+      onCompact: () => {},
+      interruptAgent: () => {},
+      active: true,
+    };
+    const { lastFrameStripped } = renderTui(<TerminalChatInput {...props} />);
+    const frame = lastFrameStripped();
+    expect(frame).toContain("/compact");
+  });
+});