To do this, I created a new `chatgpt` crate where we can put any code that interacts directly with ChatGPT, as opposed to the OpenAI API. I added a disclaimer to its README that it should primarily be modified by OpenAI employees.

https://github.com/user-attachments/assets/bb978e33-d2c9-4d8e-af28-c8c25b1988e8
```rust
use codex_core::config::Config;
use serde::Deserialize;

use crate::chatgpt_client::chatgpt_get_request;

#[derive(Debug, Deserialize)]
pub struct GetTaskResponse {
    pub current_diff_task_turn: Option<AssistantTurn>,
}

// Only relevant fields for our extraction
#[derive(Debug, Deserialize)]
pub struct AssistantTurn {
    pub output_items: Vec<OutputItem>,
}

#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
pub enum OutputItem {
    #[serde(rename = "pr")]
    Pr(PrOutputItem),

    // Unrecognized item types deserialize to `Other` instead of failing.
    #[serde(other)]
    Other,
}

#[derive(Debug, Deserialize)]
pub struct PrOutputItem {
    pub output_diff: OutputDiff,
}

#[derive(Debug, Deserialize)]
pub struct OutputDiff {
    pub diff: String,
}

/// Fetch a task from the ChatGPT backend and deserialize the response.
pub(crate) async fn get_task(config: &Config, task_id: String) -> anyhow::Result<GetTaskResponse> {
    let path = format!("/wham/tasks/{task_id}");
    chatgpt_get_request(config, path).await
}
```
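For reference, here is a minimal sketch of how a caller might pull the diff out of a `GetTaskResponse`. The `extract_diff` helper is hypothetical and not part of this change; it only relies on the types defined above.

```rust
// Hypothetical helper (not part of this PR): walk the turn's output items
// and return the first PR diff, if the task produced one.
fn extract_diff(response: &GetTaskResponse) -> Option<&str> {
    response
        .current_diff_task_turn
        .as_ref()?
        .output_items
        .iter()
        .find_map(|item| match item {
            OutputItem::Pr(pr) => Some(pr.output_diff.diff.as_str()),
            OutputItem::Other => None,
        })
}
```

Because unknown item types map to `OutputItem::Other`, this kind of caller keeps working if the backend starts returning new output item kinds.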