From 8e2e77fafb5f3b166622c0cae11fc1143ced276a Mon Sep 17 00:00:00 2001
From: Fouad Matin <169186268+fouad-openai@users.noreply.github.com>
Date: Fri, 18 Apr 2025 14:09:35 -0700
Subject: [PATCH] feat: add /bug report command (#312)

Add `/bug` command for chat session
---
 .github/ISSUE_TEMPLATE/2-bug-report.yml       |  1 +
 .../components/chat/terminal-chat-input.tsx   | 66 +++++++++++++++
 .../src/components/chat/terminal-chat.tsx     |  1 +
 codex-cli/src/components/help-overlay.tsx     |  3 +
 codex-cli/src/utils/bug-report.ts             | 81 +++++++++++++++++++
 5 files changed, 152 insertions(+)
 create mode 100644 codex-cli/src/utils/bug-report.ts

diff --git a/.github/ISSUE_TEMPLATE/2-bug-report.yml b/.github/ISSUE_TEMPLATE/2-bug-report.yml
index 11476d50..9db7f2ef 100644
--- a/.github/ISSUE_TEMPLATE/2-bug-report.yml
+++ b/.github/ISSUE_TEMPLATE/2-bug-report.yml
@@ -26,6 +26,7 @@ body:
       label: Which model were you using?
       description: Like `gpt-4.1`, `o4-mini`, `o3`, etc.
   - type: input
+    id: platform
     attributes:
       label: What platform is your computer?
       description: |
diff --git a/codex-cli/src/components/chat/terminal-chat-input.tsx b/codex-cli/src/components/chat/terminal-chat-input.tsx
index ad7ad39a..fda19b6b 100644
--- a/codex-cli/src/components/chat/terminal-chat-input.tsx
+++ b/codex-cli/src/components/chat/terminal-chat-input.tsx
@@ -45,6 +45,7 @@ export default function TerminalChatInput({
   onCompact,
   interruptAgent,
   active,
+  items = [],
 }: {
   isNew: boolean;
   loading: boolean;
@@ -65,6 +66,8 @@ export default function TerminalChatInput({
   onCompact: () => void;
   interruptAgent: () => void;
   active: boolean;
+  // New: current conversation items so we can include them in bug reports
+  items?: Array<ResponseItem>;
 }): React.ReactElement {
   const app = useApp();
   const [selectedSuggestion, setSelectedSuggestion] = useState(0);
@@ -239,6 +242,68 @@ export default function TerminalChatInput({
         },
       );

+      return;
+    } else if (inputValue === "/bug") {
+      // Generate a GitHub bug report URL pre‑filled with session details
+      setInput("");
+
+      try {
+        // Dynamically import dependencies to avoid unnecessary bundle size
+        const [{ default: open }, os] = await Promise.all([
+          import("open"),
+          import("node:os"),
+        ]);
+
+        // Lazy import CLI_VERSION to avoid circular deps
+        const { CLI_VERSION } = await import("../../utils/session.js");
+
+        const { buildBugReportUrl } = await import(
+          "../../utils/bug-report.js"
+        );
+
+        const url = buildBugReportUrl({
+          items: items ?? [],
+          cliVersion: CLI_VERSION,
+          model: loadConfig().model ?? "unknown",
+          platform: `${os.platform()} ${os.arch()} ${os.release()}`,
+        });
+
+        // Open the URL in the user's default browser
+        await open(url, { wait: false });
+
+        // Inform the user in the chat history
+        setItems((prev) => [
+          ...prev,
+          {
+            id: `bugreport-${Date.now()}`,
+            type: "message",
+            role: "system",
+            content: [
+              {
+                type: "input_text",
+                text: "📋 Opened browser to file a bug report. Please include any context that might help us fix the issue!",
+              },
+            ],
+          },
+        ]);
+      } catch (error) {
+        // If anything went wrong, notify the user
+        setItems((prev) => [
+          ...prev,
+          {
+            id: `bugreport-error-${Date.now()}`,
+            type: "message",
+            role: "system",
+            content: [
+              {
+                type: "input_text",
+                text: `⚠️ Failed to create bug report URL: ${error}`,
+              },
+            ],
+          },
+        ]);
+      }
+
       return;
     } else if (inputValue.startsWith("/")) {
       // Handle invalid/unrecognized commands.
@@ -330,6 +395,7 @@ export default function TerminalChatInput({
       openHelpOverlay,
       history, // Add history to the dependency array
       onCompact,
+      items,
     ],
   );

diff --git a/codex-cli/src/components/chat/terminal-chat.tsx b/codex-cli/src/components/chat/terminal-chat.tsx
index 50ee4479..1cfeffe1 100644
--- a/codex-cli/src/components/chat/terminal-chat.tsx
+++ b/codex-cli/src/components/chat/terminal-chat.tsx
@@ -516,6 +516,7 @@ export default function TerminalChat({
               agent.run(inputs, lastResponseId || "");
               return {};
             }}
+            items={items}
           />
         )}
         {overlayMode === "history" && (
diff --git a/codex-cli/src/components/help-overlay.tsx b/codex-cli/src/components/help-overlay.tsx
index 9feabc16..132add83 100644
--- a/codex-cli/src/components/help-overlay.tsx
+++ b/codex-cli/src/components/help-overlay.tsx
@@ -52,6 +52,9 @@ export default function HelpOverlay({
         <Text>
           <Text color="cyan">/clearhistory</Text> – clear command history
         </Text>
+        <Text>
+          <Text color="cyan">/bug</Text> – file a bug report with session log
+        </Text>
         <Text>
           <Text color="cyan">/compact</Text> – condense context into a summary
         </Text>
diff --git a/codex-cli/src/utils/bug-report.ts b/codex-cli/src/utils/bug-report.ts
new file mode 100644
index 00000000..0fbd0329
--- /dev/null
+++ b/codex-cli/src/utils/bug-report.ts
@@ -0,0 +1,81 @@
+import type { ResponseItem } from "openai/resources/responses/responses.mjs";
+
+/**
+ * Build a GitHub issues‐new URL that pre‑fills the Codex 2‑bug‑report.yml
+ * template with whatever structured data we can infer from the current
+ * session.
+ */
+export function buildBugReportUrl({
+  items,
+  cliVersion,
+  model,
+  platform,
+}: {
+  /** Chat history so we can summarise user steps */
+  items: Array<ResponseItem>;
+  /** CLI revision string (e.g. output of `codex --revision`) */
+  cliVersion: string;
+  /** Active model name */
+  model: string;
+  /** Platform string – e.g. `darwin arm64 23.0.0` */
+  platform: string;
+}): string {
+  const params = new URLSearchParams({
+    template: "2-bug-report.yml",
+    labels: "bug",
+  });
+
+  // Template ids -------------------------------------------------------------
+  params.set("version", cliVersion);
+  params.set("model", model);
+
+  // The platform input has no explicit `id`, so GitHub falls back to a slug of
+  // the label text. For “What platform is your computer?” that slug is:
+  //   what-platform-is-your-computer
+  params.set("what-platform-is-your-computer", platform);
+
+  // Build the steps bullet list ---------------------------------------------
+  const bullets: Array<string> = [];
+  for (let i = 0; i < items.length; ) {
+    const entry = items[i];
+    if (entry?.type === "message" && entry.role === "user") {
+      const contentArray = entry.content as
+        | Array<{ text?: string }>
+        | undefined;
+      const messageText = contentArray
+        ?.map((c) => c.text ?? "")
+        .join(" ")
+        .trim();
+
+      let reasoning = 0;
+      let toolCalls = 0;
+      let j = i + 1;
+      while (j < items.length) {
+        const it = items[j];
+        if (it?.type === "message" && it.role === "user") {
+          break; // the next user message starts a new step
+        }
+        if (it?.type === "message" && it.role === "assistant") {
+          reasoning += 1;
+        } else if (it?.type === "function_call") {
+          toolCalls += 1;
+        }
+        j++;
+      }
+
+      bullets.push(
+        `- "${messageText}"\n  - \`${reasoning} reasoning steps\` | \`${toolCalls} tool calls\``,
+      );
+
+      i = j;
+    } else {
+      i += 1;
+    }
+  }
+
+  if (bullets.length) {
+    params.set("steps", bullets.join("\n"));
+  }
+
+  return `https://github.com/openai/codex/issues/new?${params.toString()}`;
+}
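
For reviewers, a minimal usage sketch of the new helper, illustrative only and not part of the patch: the sample items, the relative import path, and the version string are made up for the example.

// usage-sketch.ts – exercises buildBugReportUrl outside the CLI.
import type { ResponseItem } from "openai/resources/responses/responses.mjs";
import { buildBugReportUrl } from "./codex-cli/src/utils/bug-report.js";

// A tiny fake session: one user request followed by one tool call.
const items = [
  {
    id: "msg_1",
    type: "message",
    role: "user",
    content: [{ type: "input_text", text: "fix the failing unit test" }],
  },
  {
    id: "fc_1",
    type: "function_call",
    call_id: "call_1",
    name: "shell",
    arguments: "{}",
  },
] as Array<ResponseItem>;

const url = buildBugReportUrl({
  items,
  cliVersion: "0.1.0", // placeholder version string
  model: "o4-mini",
  platform: "darwin arm64 23.0.0",
});

// Prints a GitHub new-issue URL whose query string carries the template id,
// version, model, platform, and a "steps" bullet summarising the session.
console.log(url);

Opening the printed URL should land on the 2-bug-report.yml issue form with those fields pre-populated.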