Add codex apply to apply a patch created from the Codex remote agent (#1528)

In order to do this, I created a new `chatgpt` crate where we can put any code that interacts directly with ChatGPT as opposed to the OpenAI API. I added a disclaimer to its README noting that it should primarily be modified by OpenAI employees.


https://github.com/user-attachments/assets/bb978e33-d2c9-4d8e-af28-c8c25b1988e8
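
In practice the flow is: `codex apply <TASK_ID>` loads the local Codex config and ChatGPT token, fetches the task from the ChatGPT backend, pulls the unified diff off the task's PR output item, and pipes it into `git apply --3way` at the repository root (see `apply_command.rs` below).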
Author: Gabriel Peal
Date: 2025-07-11 13:30:11 -04:00
Committed by: GitHub
Parent: 9e58076cf5
Commit: bfeb8c92a5

16 changed files with 559 additions and 16 deletions

apply_command.rs

@@ -0,0 +1,89 @@
use clap::Parser;
use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;

use crate::chatgpt_token::init_chatgpt_token_from_auth;
use crate::get_task::GetTaskResponse;
use crate::get_task::OutputItem;
use crate::get_task::PrOutputItem;
use crate::get_task::get_task;

/// Applies the latest diff from a Codex agent task.
#[derive(Debug, Parser)]
pub struct ApplyCommand {
    pub task_id: String,

    #[clap(flatten)]
    pub config_overrides: CliConfigOverrides,
}

pub async fn run_apply_command(apply_cli: ApplyCommand) -> anyhow::Result<()> {
    let config = Config::load_with_cli_overrides(
        apply_cli
            .config_overrides
            .parse_overrides()
            .map_err(anyhow::Error::msg)?,
        ConfigOverrides::default(),
    )?;
    init_chatgpt_token_from_auth(&config.codex_home).await?;

    let task_response = get_task(&config, apply_cli.task_id).await?;
    apply_diff_from_task(task_response).await
}

pub async fn apply_diff_from_task(task_response: GetTaskResponse) -> anyhow::Result<()> {
    let diff_turn = match task_response.current_diff_task_turn {
        Some(turn) => turn,
        None => anyhow::bail!("No diff turn found"),
    };

    // Find the PR output item on the turn; it carries the unified diff.
    let output_diff = diff_turn.output_items.iter().find_map(|item| match item {
        OutputItem::Pr(PrOutputItem { output_diff }) => Some(output_diff),
        _ => None,
    });
    match output_diff {
        Some(output_diff) => apply_diff(&output_diff.diff).await,
        None => anyhow::bail!("No PR output item found"),
    }
}

async fn apply_diff(diff: &str) -> anyhow::Result<()> {
    // Resolve the repository root so the diff is applied relative to it.
    let toplevel_output = tokio::process::Command::new("git")
        .args(vec!["rev-parse", "--show-toplevel"])
        .output()
        .await?;
    if !toplevel_output.status.success() {
        anyhow::bail!("apply must be run from a git repository.");
    }
    let repo_root = String::from_utf8(toplevel_output.stdout)?
        .trim()
        .to_string();

    // Stream the diff into `git apply --3way` via stdin.
    let mut git_apply_cmd = tokio::process::Command::new("git")
        .args(vec!["apply", "--3way"])
        .current_dir(&repo_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;
    if let Some(mut stdin) = git_apply_cmd.stdin.take() {
        tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?;
        drop(stdin);
    }

    let output = git_apply_cmd.wait_with_output().await?;
    if !output.status.success() {
        anyhow::bail!(
            "Git apply failed with status {}: {}",
            output.status,
            String::from_utf8_lossy(&output.stderr)
        );
    }
    println!("Successfully applied diff");
    Ok(())
}
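
For context, a minimal sketch of how a binary might wire this subcommand up. This wiring is not part of the commit; the `codex_chatgpt` crate name and the use of a tokio runtime are assumptions.

// Hypothetical wiring, not part of this commit: a standalone binary invoking
// the new subcommand. Assumes the crate is imported as `codex_chatgpt` and
// that tokio is available.
use clap::Parser;
use codex_chatgpt::apply_command::{ApplyCommand, run_apply_command};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let apply_cli = ApplyCommand::parse();
    run_apply_command(apply_cli).await
}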

chatgpt_client.rs

@@ -0,0 +1,45 @@
use codex_core::config::Config;

use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

use anyhow::Context;
use serde::de::DeserializeOwned;

/// Make a GET request to the ChatGPT backend API.
pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    config: &Config,
    path: String,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    init_chatgpt_token_from_auth(&config.codex_home).await?;

    // Make direct HTTP request to ChatGPT backend API with the token
    let client = reqwest::Client::new();
    let url = format!("{chatgpt_base_url}{path}");
    let token =
        get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;

    let response = client
        .get(&url)
        .bearer_auth(&token.access_token)
        .header("chatgpt-account-id", &token.account_id)
        .header("Content-Type", "application/json")
        .header("User-Agent", "codex-cli")
        .send()
        .await
        .context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response
            .json()
            .await
            .context("Failed to parse JSON response")?;
        Ok(result)
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {}: {}", status, body)
    }
}
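
To illustrate the generic helper, a hypothetical crate-internal caller might look like the sketch below; the `/wham/example` endpoint and `ExampleResponse` type are invented purely for illustration (the real caller is `get_task` in `get_task.rs`).

// Hypothetical caller, for illustration only: this endpoint and response type
// do not exist in the commit. The real caller is get_task().
#[derive(serde::Deserialize)]
struct ExampleResponse {
    id: String,
}

async fn fetch_example(config: &Config) -> anyhow::Result<ExampleResponse> {
    chatgpt_get_request(config, "/wham/example".to_string()).await
}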

chatgpt_token.rs

@@ -0,0 +1,24 @@
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

use codex_login::TokenData;

static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));

pub fn get_chatgpt_token_data() -> Option<TokenData> {
    CHATGPT_TOKEN.read().ok()?.clone()
}

pub fn set_chatgpt_token_data(value: TokenData) {
    if let Ok(mut guard) = CHATGPT_TOKEN.write() {
        *guard = Some(value);
    }
}

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
    let auth_json = codex_login::try_read_auth_json(codex_home).await?;
    set_chatgpt_token_data(auth_json.tokens.clone());
    Ok(())
}
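
A minimal sketch of how these helpers compose, assuming `codex_home` points at a directory containing an `auth.json` written by Codex login; `account_id` and `access_token` are the fields the HTTP client above relies on.

// Sketch of intended usage inside the crate: load the token once from
// auth.json, then read the cached copy when building a request.
async fn example_usage(codex_home: &std::path::Path) -> anyhow::Result<()> {
    init_chatgpt_token_from_auth(codex_home).await?;
    let token = get_chatgpt_token_data()
        .ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;
    println!("authenticated account: {}", token.account_id);
    Ok(())
}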

get_task.rs

@@ -0,0 +1,40 @@
use codex_core::config::Config;
use serde::Deserialize;

use crate::chatgpt_client::chatgpt_get_request;

#[derive(Debug, Deserialize)]
pub struct GetTaskResponse {
    pub current_diff_task_turn: Option<AssistantTurn>,
}

// Only relevant fields for our extraction
#[derive(Debug, Deserialize)]
pub struct AssistantTurn {
    pub output_items: Vec<OutputItem>,
}

#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
pub enum OutputItem {
    #[serde(rename = "pr")]
    Pr(PrOutputItem),
    #[serde(other)]
    Other,
}

#[derive(Debug, Deserialize)]
pub struct PrOutputItem {
    pub output_diff: OutputDiff,
}

#[derive(Debug, Deserialize)]
pub struct OutputDiff {
    pub diff: String,
}

pub(crate) async fn get_task(config: &Config, task_id: String) -> anyhow::Result<GetTaskResponse> {
    let path = format!("/wham/tasks/{task_id}");
    chatgpt_get_request(config, path).await
}
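
For reference, a test-style sketch of the minimal JSON shape these types accept. The field names come from the structs above; everything else about the real `/wham/tasks/{task_id}` payload is an assumption, and `serde_json` is assumed to be available as a dev dependency.

// Sketch of the minimal payload shape these types deserialize; unknown output
// item kinds fall through to OutputItem::Other via #[serde(other)].
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn deserializes_minimal_task_payload() {
        let json = r#"{
            "current_diff_task_turn": {
                "output_items": [
                    {"type": "pr", "output_diff": {"diff": "diff --git a/x b/x\n"}},
                    {"type": "message"}
                ]
            }
        }"#;
        let task: GetTaskResponse = serde_json::from_str(json).expect("valid payload");
        let turn = task.current_diff_task_turn.expect("diff turn present");
        assert_eq!(turn.output_items.len(), 2);
    }
}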

lib.rs

@@ -0,0 +1,4 @@
pub mod apply_command;
mod chatgpt_client;
mod chatgpt_token;
pub mod get_task;