To do this, I created a new `chatgpt` crate where we can put any code that interacts directly with ChatGPT rather than the OpenAI API. I added a disclaimer to its README stating that it should primarily be modified by OpenAI employees.

https://github.com/user-attachments/assets/bb978e33-d2c9-4d8e-af28-c8c25b1988e8
```rust
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

use codex_login::TokenData;

/// Process-wide cache of the ChatGPT token, populated from auth.json.
static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));

/// Returns a clone of the cached token, or `None` if it has not been set
/// (or the lock is poisoned).
pub fn get_chatgpt_token_data() -> Option<TokenData> {
    CHATGPT_TOKEN.read().ok()?.clone()
}

/// Replaces the cached token.
pub fn set_chatgpt_token_data(value: TokenData) {
    if let Ok(mut guard) = CHATGPT_TOKEN.write() {
        *guard = Some(value);
    }
}

/// Initialize the ChatGPT token from the auth.json file in `codex_home`.
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
    let auth_json = codex_login::try_read_auth_json(codex_home).await?;
    set_chatgpt_token_data(auth_json.tokens.clone());
    Ok(())
}
```
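For context, here is a minimal sketch of how a caller might use this module: initialize the cache once at startup, then read the token anywhere ChatGPT-specific code needs it. The `chatgpt::` crate path, the `load_token_example` function, and the `/home/user/.codex` directory are illustrative assumptions for this sketch, not names taken from the tree.

```rust
use std::path::Path;

// Hypothetical call site; the real wiring would live wherever the CLI boots up.
async fn load_token_example() -> std::io::Result<()> {
    // Assumed Codex home directory containing auth.json (illustrative path).
    let codex_home = Path::new("/home/user/.codex");

    // Populate the process-wide token cache from auth.json once at startup.
    chatgpt::init_chatgpt_token_from_auth(codex_home).await?;

    // Later, ChatGPT-specific code can read the cached token without extra I/O.
    if let Some(tokens) = chatgpt::get_chatgpt_token_data() {
        // Use `tokens` to authenticate requests against ChatGPT endpoints.
        let _ = tokens;
    }
    Ok(())
}
```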