feat: add support for login with ChatGPT (#1212)
This does not implement the full Login with ChatGPT experience, but it
should unblock people.
**What works**
* The `codex` multitool now has a `login` subcommand, so you can run
`codex login`, which should write `CODEX_HOME/auth.json` if you complete
the flow successfully (see the example after this list). The TUI will now
read the `OPENAI_API_KEY` from `auth.json`.
* The TUI should refresh the token if it has expired and the necessary
information is in `auth.json`.
* There is a `LoginScreen` in the TUI that tells you to run `codex
login` if both (1) your model provider expects to use `OPENAI_API_KEY`
as its env var, and (2) `OPENAI_API_KEY` is not set.
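For reference, a successful login leaves `CODEX_HOME/auth.json` looking roughly like this (illustrative placeholder values; the real file contains live credentials):
```
{
  "OPENAI_API_KEY": "sk-...",
  "tokens": {
    "id_token": "<JWT>",
    "access_token": "<JWT>",
    "refresh_token": "<opaque token>"
  },
  "last_refresh": "2024-01-01T00:00:00Z"
}
```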
**What does not work**
* The `LoginScreen` does not support the login flow from within the TUI.
Instead, it tells you to quit, run `codex login`, and then run `codex`
again.
* `codex exec` does not yet read from `auth.json`, nor does it direct the
user to go through the login flow if `OPENAI_API_KEY` is not found.
* The `maybeRedeemCredits()` function from `get-api-key.tsx` has not
been ported from TypeScript to `login_with_chatgpt.py` yet:
a67a67f325/codex-cli/src/utils/get-api-key.tsx (L84-L89)
**Implementation**
Currently, the OAuth flow requires running a local webserver on
`127.0.0.1:1455`. It seemed wasteful to incur the additional binary cost
of a webserver dependency in the Rust CLI just to support login, so
instead we implement this logic in Python, since Python ships an
`http.server` module in its standard library. Specifically, we bundle the
contents of a single Python file as a string in the Rust CLI and then
use it to spawn a subprocess as `python3 -c
{{SOURCE_FOR_PYTHON_SERVER}}`.
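Concretely, the Rust side embeds the script at compile time and shells out to `python3`. The sketch below is simplified from `codex-rs/login/src/lib.rs` in this PR (the real entry point is `login_with_chatgpt()`, which also captures output and reads the key back):
```
use std::path::Path;
use tokio::process::Command;

// The entire Python script is compiled into the binary as a string constant.
const SOURCE_FOR_PYTHON_SERVER: &str = include_str!("./login_with_chatgpt.py");

async fn spawn_login_server(codex_home: &Path) -> std::io::Result<std::process::ExitStatus> {
    // Runs `python3 -c <script>` with CODEX_HOME pointing at the Codex config dir;
    // on success the script writes CODEX_HOME/auth.json.
    Command::new("python3")
        .arg("-c")
        .arg(SOURCE_FOR_PYTHON_SERVER)
        .env("CODEX_HOME", codex_home)
        .status()
        .await
}
```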
As such, the most significant files in this PR are:
```
codex-rs/login/src/login_with_chatgpt.py
codex-rs/login/src/lib.rs
```
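The Python file is a self-contained, stdlib-only script that runs the local OAuth webserver and writes `auth.json`; `lib.rs` embeds that script as a string, spawns it, and also knows how to read and refresh the credentials it produces.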
Now that the CLI may load `OPENAI_API_KEY` from the environment _or_
`CODEX_HOME/auth.json`, we need a new abstraction for reading/writing
this variable, so we introduce:
```
codex-rs/core/src/openai_api_key.rs
```
Note that `std::env::set_var()` is [rightfully] `unsafe` in Rust 2024, so we
store `OPENAI_API_KEY` in a `LazyLock<RwLock<Option<String>>>` so it can be
read and written in a thread-safe manner.
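The core of `openai_api_key.rs` looks like this (abridged from the new file in this PR):
```
use std::sync::LazyLock;
use std::sync::RwLock;

pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";

// Seeded once from the environment; set_openai_api_key() overwrites it after
// a successful login or token refresh.
static OPENAI_API_KEY: LazyLock<RwLock<Option<String>>> = LazyLock::new(|| {
    let val = std::env::var(OPENAI_API_KEY_ENV_VAR)
        .ok()
        .and_then(|s| if s.is_empty() { None } else { Some(s) });
    RwLock::new(val)
});

pub fn get_openai_api_key() -> Option<String> {
    OPENAI_API_KEY.read().unwrap().clone()
}
```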
Ultimately, it should be possible to go through the entire login flow
from the TUI. For now, this PR introduces a placeholder `LoginScreen`;
the new `codex login` subcommand should be a viable workaround until
that UI is ready.
**Testing**
Because the login flow is currently implemented in a standalone Python
file, you can test it without building any Rust code as follows:
```
rm -rf /tmp/codex_home && mkdir /tmp/codex_home
CODEX_HOME=/tmp/codex_home python3 codex-rs/login/src/login_with_chatgpt.py
```
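If the flow completes successfully, the script exits 0 and `/tmp/codex_home/auth.json` should contain the `OPENAI_API_KEY` (plus token data) shown earlier; if the flow is abandoned or fails, the script exits non-zero.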
For reference:
* the original TypeScript implementation was introduced in
https://github.com/openai/codex/pull/963
* support for redeeming credits was later added in
https://github.com/openai/codex/pull/974
The commit diff follows.

A hunk in the TypeScript CLI documents that the `key` property returned by the token exchange is not read:
```
@@ -382,6 +382,8 @@ async function handleCallback(
  const exchanged = (await exchangeRes.json()) as {
    access_token: string;
    // NOTE(mbolin): I did not see the "key" property set in practice. Note
    // this property is not read by the code.
    key: string;
  };
```
codex-rs/Cargo.lock (generated, 14 lines changed):
```
@@ -585,6 +585,7 @@ dependencies = [
 "codex-core",
 "codex-exec",
 "codex-linux-sandbox",
 "codex-login",
 "codex-mcp-server",
 "codex-tui",
 "serde_json",
@@ -613,6 +614,7 @@ dependencies = [
 "base64 0.21.7",
 "bytes",
 "codex-apply-patch",
 "codex-login",
 "codex-mcp-client",
 "dirs",
 "env-flags",
@@ -704,6 +706,17 @@ dependencies = [
 "tokio",
]

[[package]]
name = "codex-login"
version = "0.0.0"
dependencies = [
 "chrono",
 "reqwest",
 "serde",
 "serde_json",
 "tokio",
]

[[package]]
name = "codex-mcp-client"
version = "0.0.0"
@@ -747,6 +760,7 @@ dependencies = [
 "codex-common",
 "codex-core",
 "codex-linux-sandbox",
 "codex-login",
 "color-eyre",
 "crossterm",
 "image",
```
The workspace members list gains a `login` member:
```
@@ -9,6 +9,7 @@ members = [
    "exec",
    "execpolicy",
    "linux-sandbox",
    "login",
    "mcp-client",
    "mcp-server",
    "mcp-types",
```
The CLI crate picks up a dependency on `codex-login`:
```
@@ -20,6 +20,7 @@ clap = { version = "4", features = ["derive"] }
codex-core = { path = "../core" }
codex-common = { path = "../common", features = ["cli"] }
codex-exec = { path = "../exec" }
codex-login = { path = "../login" }
codex-linux-sandbox = { path = "../linux-sandbox" }
codex-mcp-server = { path = "../mcp-server" }
codex-tui = { path = "../tui" }
```
The CLI library exposes the new `login` module:
```
@@ -1,5 +1,6 @@
pub mod debug_sandbox;
mod exit_status;
pub mod login;
pub mod proto;

use clap::Parser;
```
codex-rs/cli/src/login.rs (new file, 35 lines):
```
use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_login::login_with_chatgpt;

pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) -> ! {
    let cli_overrides = match cli_config_overrides.parse_overrides() {
        Ok(v) => v,
        Err(e) => {
            eprintln!("Error parsing -c overrides: {e}");
            std::process::exit(1);
        }
    };

    let config_overrides = ConfigOverrides::default();
    let config = match Config::load_with_cli_overrides(cli_overrides, config_overrides) {
        Ok(config) => config,
        Err(e) => {
            eprintln!("Error loading configuration: {e}");
            std::process::exit(1);
        }
    };

    let capture_output = false;
    match login_with_chatgpt(&config.codex_home, capture_output).await {
        Ok(_) => {
            eprintln!("Successfully logged in");
            std::process::exit(0);
        }
        Err(e) => {
            eprintln!("Error logging in: {e}");
            std::process::exit(1);
        }
    }
}
```
The CLI's `main.rs` wires up the new subcommand:
```
@@ -1,6 +1,7 @@
use clap::Parser;
use codex_cli::LandlockCommand;
use codex_cli::SeatbeltCommand;
use codex_cli::login::run_login_with_chatgpt;
use codex_cli::proto;
use codex_common::CliConfigOverrides;
use codex_exec::Cli as ExecCli;
@@ -36,6 +37,9 @@ enum Subcommand {
    #[clap(visible_alias = "e")]
    Exec(ExecCli),

    /// Login with ChatGPT.
    Login(LoginCommand),

    /// Experimental: run Codex as an MCP server.
    Mcp,

@@ -63,7 +67,10 @@ enum DebugCommand {
}

#[derive(Debug, Parser)]
struct ReplProto {}
struct LoginCommand {
    #[clap(skip)]
    config_overrides: CliConfigOverrides,
}

fn main() -> anyhow::Result<()> {
    codex_linux_sandbox::run_with_sandbox(|codex_linux_sandbox_exe| async move {
@@ -88,6 +95,10 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
        Some(Subcommand::Mcp) => {
            codex_mcp_server::run_main(codex_linux_sandbox_exe).await?;
        }
        Some(Subcommand::Login(mut login_cli)) => {
            prepend_config_flags(&mut login_cli.config_overrides, cli.config_overrides);
            run_login_with_chatgpt(login_cli.config_overrides).await;
        }
        Some(Subcommand::Proto(mut proto_cli)) => {
            prepend_config_flags(&mut proto_cli.config_overrides, cli.config_overrides);
            proto::run_main(proto_cli).await?;
```
The core crate also depends on `codex-login`:
```
@@ -16,6 +16,7 @@ async-channel = "2.3.1"
base64 = "0.21"
bytes = "1.10.1"
codex-apply-patch = { path = "../apply-patch" }
codex-login = { path = "../login" }
codex-mcp-client = { path = "../mcp-client" }
dirs = "6"
env-flags = "0.1.1"
```
`codex-core` exposes the new `openai_api_key` module:
```
@@ -27,6 +27,7 @@ mod model_provider_info;
pub use model_provider_info::ModelProviderInfo;
pub use model_provider_info::WireApi;
mod models;
pub mod openai_api_key;
mod openai_tools;
mod project_doc;
pub mod protocol;
```
`ModelProviderInfo::api_key()` now consults the in-memory `OPENAI_API_KEY` for the OpenAI provider instead of reading the environment directly:
```
@@ -11,6 +11,7 @@ use std::collections::HashMap;
use std::env::VarError;

use crate::error::EnvVarError;
use crate::openai_api_key::get_openai_api_key;

/// Wire protocol that the provider speaks. Most third-party services only
/// implement the classic OpenAI Chat Completions JSON schema, whereas OpenAI
@@ -52,7 +53,13 @@ impl ModelProviderInfo {
    /// cannot be found, returns an error.
    pub fn api_key(&self) -> crate::error::Result<Option<String>> {
        match &self.env_key {
            Some(env_key) => std::env::var(env_key)
            Some(env_key) => {
                let env_value = if env_key == crate::openai_api_key::OPENAI_API_KEY_ENV_VAR {
                    get_openai_api_key().map_or_else(|| Err(VarError::NotPresent), Ok)
                } else {
                    std::env::var(env_key)
                };
                env_value
                    .and_then(|v| {
                        if v.trim().is_empty() {
                            Err(VarError::NotPresent)
@@ -65,7 +72,8 @@ impl ModelProviderInfo {
                            var: env_key.clone(),
                            instructions: self.env_key_instructions.clone(),
                        })
                    }),
                })
            }
            None => Ok(None),
        }
    }
```
codex-rs/core/src/openai_api_key.rs (new file, 24 lines):
```
use std::env;
use std::sync::LazyLock;
use std::sync::RwLock;

pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";

static OPENAI_API_KEY: LazyLock<RwLock<Option<String>>> = LazyLock::new(|| {
    let val = env::var(OPENAI_API_KEY_ENV_VAR)
        .ok()
        .and_then(|s| if s.is_empty() { None } else { Some(s) });
    RwLock::new(val)
});

pub fn get_openai_api_key() -> Option<String> {
    #![allow(clippy::unwrap_used)]
    OPENAI_API_KEY.read().unwrap().clone()
}

pub fn set_openai_api_key(value: String) {
    #![allow(clippy::unwrap_used)]
    if !value.is_empty() {
        *OPENAI_API_KEY.write().unwrap() = Some(value);
    }
}
```
codex-rs/login/Cargo.toml (new file, 20 lines):
```
[package]
name = "codex-login"
version = { workspace = true }
edition = "2024"

[lints]
workspace = true

[dependencies]
chrono = { version = "0.4", features = ["serde"] }
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
    "signal",
] }
```
codex-rs/login/src/lib.rs (new file, 168 lines):
```
use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
use std::path::Path;
use std::process::Stdio;
use tokio::process::Command;

const SOURCE_FOR_PYTHON_SERVER: &str = include_str!("./login_with_chatgpt.py");

const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";

/// Run `python3 -c {{SOURCE_FOR_PYTHON_SERVER}}` with the CODEX_HOME
/// environment variable set to the provided `codex_home` path. If the
/// subprocess exits 0, read the OPENAI_API_KEY property out of
/// CODEX_HOME/auth.json and return Ok(OPENAI_API_KEY). Otherwise, return Err
/// with any information from the subprocess.
///
/// If `capture_output` is true, the subprocess's output will be captured and
/// recorded in memory. Otherwise, the subprocess's output will be sent to the
/// current process's stdout/stderr.
pub async fn login_with_chatgpt(
    codex_home: &Path,
    capture_output: bool,
) -> std::io::Result<String> {
    let child = Command::new("python3")
        .arg("-c")
        .arg(SOURCE_FOR_PYTHON_SERVER)
        .env("CODEX_HOME", codex_home)
        .stdin(Stdio::null())
        .stdout(if capture_output {
            Stdio::piped()
        } else {
            Stdio::inherit()
        })
        .stderr(if capture_output {
            Stdio::piped()
        } else {
            Stdio::inherit()
        })
        .spawn()?;

    let output = child.wait_with_output().await?;
    if output.status.success() {
        try_read_openai_api_key(codex_home).await
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        Err(std::io::Error::other(format!(
            "login_with_chatgpt subprocess failed: {stderr}"
        )))
    }
}

/// Attempt to read the `OPENAI_API_KEY` from the `auth.json` file in the given
/// `CODEX_HOME` directory, refreshing it, if necessary.
pub async fn try_read_openai_api_key(codex_home: &Path) -> std::io::Result<String> {
    let auth_path = codex_home.join("auth.json");
    let mut file = std::fs::File::open(&auth_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;

    if is_expired(&auth_dot_json) {
        let refresh_response = try_refresh_token(&auth_dot_json).await?;
        let mut auth_dot_json = auth_dot_json;
        auth_dot_json.tokens.id_token = refresh_response.id_token;
        if let Some(refresh_token) = refresh_response.refresh_token {
            auth_dot_json.tokens.refresh_token = refresh_token;
        }
        auth_dot_json.last_refresh = Utc::now();

        let mut options = OpenOptions::new();
        options.write(true).create(true);
        #[cfg(unix)]
        {
            options.mode(0o600);
        }

        let json_data = serde_json::to_string(&auth_dot_json)?;
        {
            let mut file = options.open(&auth_path)?;
            file.write_all(json_data.as_bytes())?;
            file.flush()?;
        }

        Ok(auth_dot_json.openai_api_key)
    } else {
        Ok(auth_dot_json.openai_api_key)
    }
}

fn is_expired(auth_dot_json: &AuthDotJson) -> bool {
    let last_refresh = auth_dot_json.last_refresh;
    last_refresh < Utc::now() - chrono::Duration::days(28)
}

async fn try_refresh_token(auth_dot_json: &AuthDotJson) -> std::io::Result<RefreshResponse> {
    let refresh_request = RefreshRequest {
        client_id: CLIENT_ID,
        grant_type: "refresh_token",
        refresh_token: auth_dot_json.tokens.refresh_token.clone(),
        scope: "openid profile email",
    };

    let client = reqwest::Client::new();
    let response = client
        .post("https://auth.openai.com/oauth/token")
        .header("Content-Type", "application/json")
        .json(&refresh_request)
        .send()
        .await
        .map_err(std::io::Error::other)?;

    if response.status().is_success() {
        let refresh_response = response
            .json::<RefreshResponse>()
            .await
            .map_err(std::io::Error::other)?;
        Ok(refresh_response)
    } else {
        Err(std::io::Error::other(format!(
            "Failed to refresh token: {}",
            response.status()
        )))
    }
}

#[derive(Serialize)]
struct RefreshRequest {
    client_id: &'static str,
    grant_type: &'static str,
    refresh_token: String,
    scope: &'static str,
}

#[derive(Deserialize)]
struct RefreshResponse {
    id_token: String,
    refresh_token: Option<String>,
}

/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize)]
struct AuthDotJson {
    #[serde(rename = "OPENAI_API_KEY")]
    openai_api_key: String,

    tokens: TokenData,

    last_refresh: DateTime<Utc>,
}

#[derive(Deserialize, Serialize)]
struct TokenData {
    /// This is a JWT.
    id_token: String,

    /// This is a JWT.
    #[allow(dead_code)]
    access_token: String,

    refresh_token: String,
}
```
codex-rs/login/src/login_with_chatgpt.py (new file, 624 lines):
@@ -0,0 +1,624 @@
|
||||
"""Script that spawns a local webserver for retrieving an OpenAI API key.
|
||||
|
||||
- Listens on 127.0.0.1:1455
|
||||
- Opens http://localhost:1455/auth/callback in the browser
|
||||
- If the user successfully navigates the auth flow,
|
||||
$CODEX_HOME/auth.json will be written with the API key.
|
||||
- User will be redirected to http://localhost:1455/success upon success.
|
||||
|
||||
The script should exit with a non-zero code if the user fails to navigate the
|
||||
auth flow.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import datetime
|
||||
import errno
|
||||
import hashlib
|
||||
import http.server
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
import sys
|
||||
import threading
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import webbrowser
|
||||
from dataclasses import dataclass
|
||||
|
||||
# Required port for OAuth client.
|
||||
REQUIRED_PORT = 1455
|
||||
URL_BASE = f"http://localhost:{REQUIRED_PORT}"
|
||||
DEFAULT_ISSUER = "https://auth.openai.com"
|
||||
DEFAULT_CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
|
||||
|
||||
EXIT_CODE_WHEN_ADDRESS_ALREADY_IN_USE = 13
|
||||
|
||||
|
||||
@dataclass
|
||||
class TokenData:
|
||||
id_token: str
|
||||
access_token: str
|
||||
refresh_token: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class AuthBundle:
|
||||
"""Aggregates authentication data produced after successful OAuth flow."""
|
||||
|
||||
api_key: str
|
||||
token_data: TokenData
|
||||
last_refresh: str
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(description="Retrieve API key via local HTTP flow")
|
||||
parser.add_argument(
|
||||
"--no-browser",
|
||||
action="store_true",
|
||||
help="Do not automatically open the browser",
|
||||
)
|
||||
parser.add_argument("--verbose", action="store_true", help="Enable request logging")
|
||||
args = parser.parse_args()
|
||||
|
||||
codex_home = os.environ.get("CODEX_HOME")
|
||||
if not codex_home:
|
||||
eprint("ERROR: CODEX_HOME environment variable is not set")
|
||||
sys.exit(1)
|
||||
|
||||
# Spawn server.
|
||||
try:
|
||||
httpd = _ApiKeyHTTPServer(
|
||||
("127.0.0.1", REQUIRED_PORT),
|
||||
_ApiKeyHTTPHandler,
|
||||
codex_home=codex_home,
|
||||
verbose=args.verbose,
|
||||
)
|
||||
except OSError as e:
|
||||
eprint(f"ERROR: {e}")
|
||||
if e.errno == errno.EADDRINUSE:
|
||||
# Caller might want to handle this case specially.
|
||||
sys.exit(EXIT_CODE_WHEN_ADDRESS_ALREADY_IN_USE)
|
||||
else:
|
||||
sys.exit(1)
|
||||
|
||||
auth_url = httpd.auth_url()
|
||||
|
||||
with httpd:
|
||||
eprint(f"Starting local login server on {URL_BASE}")
|
||||
if not args.no_browser:
|
||||
try:
|
||||
webbrowser.open(auth_url, new=1, autoraise=True)
|
||||
except Exception as e:
|
||||
eprint(f"Failed to open browser: {e}")
|
||||
|
||||
eprint(
|
||||
f"If your browser did not open, navigate to this URL to authenticate:\n\n{auth_url}"
|
||||
)
|
||||
|
||||
# Run the server in the main thread until `shutdown()` is called by the
|
||||
# request handler.
|
||||
try:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
eprint("\nKeyboard interrupt received, exiting.")
|
||||
|
||||
# Server has been shut down by the request handler. Exit with the code
|
||||
# it set (0 on success, non-zero on failure).
|
||||
sys.exit(httpd.exit_code)
|
||||
|
||||
|
||||
class _ApiKeyHTTPHandler(http.server.BaseHTTPRequestHandler):
|
||||
"""A minimal request handler that captures an *api key* from query/post."""
|
||||
|
||||
# We store the result in the server instance itself.
|
||||
server: "_ApiKeyHTTPServer" # type: ignore[override] - helpful annotation
|
||||
|
||||
def do_GET(self) -> None: # noqa: N802 – required by BaseHTTPRequestHandler
|
||||
path = urllib.parse.urlparse(self.path).path
|
||||
|
||||
if path == "/success":
|
||||
# Serve confirmation page then gracefully shut down the server so
|
||||
# the main thread can exit with the previously captured exit code.
|
||||
self._send_html(LOGIN_SUCCESS_HTML)
|
||||
|
||||
# Ensure the data is flushed to the client before we stop.
|
||||
try:
|
||||
self.wfile.flush()
|
||||
except Exception as e:
|
||||
eprint(f"Failed to flush response: {e}")
|
||||
|
||||
self.request_shutdown()
|
||||
elif path == "/auth/callback":
|
||||
query = urllib.parse.urlparse(self.path).query
|
||||
params = urllib.parse.parse_qs(query)
|
||||
|
||||
# Validate state -------------------------------------------------
|
||||
if params.get("state", [None])[0] != self.server.state:
|
||||
self.send_error(400, "State parameter mismatch")
|
||||
return
|
||||
|
||||
# Standard OAuth flow -----------------------------------------
|
||||
code = params.get("code", [None])[0]
|
||||
if not code:
|
||||
self.send_error(400, "Missing authorization code")
|
||||
return
|
||||
|
||||
try:
|
||||
auth_bundle, success_url = self._exchange_code_for_api_key(code)
|
||||
except Exception as exc: # noqa: BLE001 – propagate to client
|
||||
self.send_error(500, f"Token exchange failed: {exc}")
|
||||
return
|
||||
|
||||
# Persist API key along with additional token metadata.
|
||||
if _write_auth_file(
|
||||
auth=auth_bundle,
|
||||
codex_home=self.server.codex_home,
|
||||
):
|
||||
self.server.exit_code = 0
|
||||
self._send_redirect(success_url)
|
||||
else:
|
||||
self.send_error(500, "Unable to persist auth file")
|
||||
else:
|
||||
self.send_error(404, "Endpoint not supported")
|
||||
|
||||
def do_POST(self) -> None: # noqa: N802 – required by BaseHTTPRequestHandler
|
||||
self.send_error(404, "Endpoint not supported")
|
||||
|
||||
def send_error(self, code, message=None, explain=None) -> None:
|
||||
"""Send an error response and stop the server.
|
||||
|
||||
We avoid calling `sys.exit()` directly from the request-handling thread
|
||||
so that the response has a chance to be written to the socket. Instead
|
||||
we shut the server down; the main thread will then exit with the
|
||||
appropriate status code.
|
||||
"""
|
||||
super().send_error(code, message, explain)
|
||||
try:
|
||||
self.wfile.flush()
|
||||
except Exception as e:
|
||||
eprint(f"Failed to flush response: {e}")
|
||||
|
||||
self.request_shutdown()
|
||||
|
||||
def _send_redirect(self, url: str) -> None:
|
||||
self.send_response(302)
|
||||
self.send_header("Location", url)
|
||||
self.end_headers()
|
||||
|
||||
def _send_html(self, body: str) -> None:
|
||||
encoded = body.encode()
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "text/html; charset=utf-8")
|
||||
self.send_header("Content-Length", str(len(encoded)))
|
||||
self.end_headers()
|
||||
self.wfile.write(encoded)
|
||||
|
||||
# Silence logging for cleanliness unless --verbose flag is used.
|
||||
def log_message(self, fmt: str, *args): # type: ignore[override]
|
||||
if getattr(self.server, "verbose", False): # type: ignore[attr-defined]
|
||||
super().log_message(fmt, *args)
|
||||
|
||||
def _exchange_code_for_api_key(self, code: str) -> tuple[AuthBundle, str]:
|
||||
"""Perform token + token-exchange to obtain an OpenAI API key.
|
||||
|
||||
Returns (AuthBundle, success_url).
|
||||
"""
|
||||
|
||||
token_endpoint = f"{self.server.issuer}/oauth/token"
|
||||
|
||||
# 1. Authorization-code -> (id_token, access_token, refresh_token)
|
||||
data = urllib.parse.urlencode(
|
||||
{
|
||||
"grant_type": "authorization_code",
|
||||
"code": code,
|
||||
"redirect_uri": self.server.redirect_uri,
|
||||
"client_id": self.server.client_id,
|
||||
"code_verifier": self.server.pkce.code_verifier,
|
||||
}
|
||||
).encode()
|
||||
|
||||
token_data: TokenData
|
||||
|
||||
with urllib.request.urlopen(
|
||||
urllib.request.Request(
|
||||
token_endpoint,
|
||||
data=data,
|
||||
method="POST",
|
||||
headers={"Content-Type": "application/x-www-form-urlencoded"},
|
||||
)
|
||||
) as resp:
|
||||
payload = json.loads(resp.read().decode())
|
||||
token_data = TokenData(
|
||||
id_token=payload["id_token"],
|
||||
access_token=payload["access_token"],
|
||||
refresh_token=payload["refresh_token"],
|
||||
)
|
||||
|
||||
id_token_parts = token_data.id_token.split(".")
|
||||
if len(id_token_parts) != 3:
|
||||
raise ValueError("Invalid ID token")
|
||||
access_token_parts = token_data.access_token.split(".")
|
||||
if len(access_token_parts) != 3:
|
||||
raise ValueError("Invalid access token")
|
||||
|
||||
id_token_claims = json.loads(
|
||||
base64.urlsafe_b64decode(id_token_parts[1] + "==").decode("utf-8")
|
||||
)
|
||||
access_token_claims = json.loads(
|
||||
base64.urlsafe_b64decode(access_token_parts[1] + "==").decode("utf-8")
|
||||
)
|
||||
|
||||
token_claims = id_token_claims.get("https://api.openai.com/auth", {})
|
||||
access_claims = access_token_claims.get("https://api.openai.com/auth", {})
|
||||
|
||||
org_id = token_claims.get("organization_id")
|
||||
if not org_id:
|
||||
raise ValueError("Missing organization in id_token claims")
|
||||
|
||||
project_id = token_claims.get("project_id")
|
||||
if not project_id:
|
||||
raise ValueError("Missing project in id_token claims")
|
||||
|
||||
random_id = secrets.token_hex(6)
|
||||
|
||||
# 2. Token exchange to obtain API key
|
||||
today = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
|
||||
exchange_data = urllib.parse.urlencode(
|
||||
{
|
||||
"grant_type": "urn:ietf:params:oauth:grant-type:token-exchange",
|
||||
"client_id": self.server.client_id,
|
||||
"requested_token": "openai-api-key",
|
||||
"subject_token": token_data.id_token,
|
||||
"subject_token_type": "urn:ietf:params:oauth:token-type:id_token",
|
||||
"name": f"Codex CLI [auto-generated] ({today}) [{random_id}]",
|
||||
}
|
||||
).encode()
|
||||
|
||||
exchanged_access_token: str
|
||||
with urllib.request.urlopen(
|
||||
urllib.request.Request(
|
||||
token_endpoint,
|
||||
data=exchange_data,
|
||||
method="POST",
|
||||
headers={"Content-Type": "application/x-www-form-urlencoded"},
|
||||
)
|
||||
) as resp:
|
||||
exchange_payload = json.loads(resp.read().decode())
|
||||
exchanged_access_token = exchange_payload["access_token"]
|
||||
|
||||
# Determine whether the organization still requires additional
|
||||
# setup (e.g., adding a payment method) based on the ID-token
|
||||
# claim provided by the auth service.
|
||||
completed_onboarding = token_claims.get("completed_platform_onboarding") == True
|
||||
chatgpt_plan_type = access_claims.get("chatgpt_plan_type")
|
||||
is_org_owner = token_claims.get("is_org_owner") == True
|
||||
needs_setup = not completed_onboarding and is_org_owner
|
||||
|
||||
# Build the success URL on the same host/port as the callback and
|
||||
# include the required query parameters for the front-end page.
|
||||
success_url_query = {
|
||||
"id_token": token_data.id_token,
|
||||
"needs_setup": "true" if needs_setup else "false",
|
||||
"org_id": org_id,
|
||||
"project_id": project_id,
|
||||
"plan_type": chatgpt_plan_type,
|
||||
"platform_url": (
|
||||
"https://platform.openai.com"
|
||||
if self.server.issuer == "https://auth.openai.com"
|
||||
else "https://platform.api.openai.org"
|
||||
),
|
||||
}
|
||||
success_url = f"{URL_BASE}/success?{urllib.parse.urlencode(success_url_query)}"
|
||||
|
||||
# TODO(mbolin): Port maybeRedeemCredits() to Python and call it here.
|
||||
|
||||
# Persist refresh_token/id_token for future use (redeem credits etc.)
|
||||
last_refresh_str = (
|
||||
datetime.datetime.now(datetime.timezone.utc)
|
||||
.isoformat()
|
||||
.replace("+00:00", "Z")
|
||||
)
|
||||
|
||||
auth_bundle = AuthBundle(
|
||||
api_key=exchanged_access_token,
|
||||
token_data=token_data,
|
||||
last_refresh=last_refresh_str,
|
||||
)
|
||||
|
||||
return (auth_bundle, success_url)
|
||||
|
||||
def request_shutdown(self) -> None:
|
||||
# shutdown() must be invoked from another thread to avoid
|
||||
# deadlocking the serve_forever() loop, which is running in this
|
||||
# same thread. A short-lived helper thread does the trick.
|
||||
threading.Thread(target=self.server.shutdown, daemon=True).start()
|
||||
|
||||
|
||||
def _write_auth_file(*, auth: AuthBundle, codex_home: str) -> bool:
|
||||
"""Persist *api_key* to $CODEX_HOME/auth.json.
|
||||
|
||||
Returns True on success, False otherwise. Any error is printed to
|
||||
*stderr* so that the Rust layer can surface the problem.
|
||||
"""
|
||||
if not os.path.isdir(codex_home):
|
||||
try:
|
||||
os.makedirs(codex_home, exist_ok=True)
|
||||
except Exception as exc: # pragma: no cover – unlikely
|
||||
eprint(f"ERROR: unable to create CODEX_HOME directory: {exc}")
|
||||
return False
|
||||
|
||||
auth_path = os.path.join(codex_home, "auth.json")
|
||||
auth_json_contents = {
|
||||
"OPENAI_API_KEY": auth.api_key,
|
||||
"tokens": {
|
||||
"id_token": auth.token_data.id_token,
|
||||
"access_token": auth.token_data.access_token,
|
||||
"refresh_token": auth.token_data.refresh_token,
|
||||
},
|
||||
"last_refresh": auth.last_refresh,
|
||||
}
|
||||
try:
|
||||
with open(auth_path, "w", encoding="utf-8") as fp:
|
||||
if hasattr(os, "fchmod"): # POSIX-safe
|
||||
os.fchmod(fp.fileno(), 0o600)
|
||||
json.dump(auth_json_contents, fp, indent=2)
|
||||
except Exception as exc: # pragma: no cover – permissions/filesystem
|
||||
eprint(f"ERROR: unable to write auth file: {exc}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@dataclass
|
||||
class PkceCodes:
|
||||
code_verifier: str
|
||||
code_challenge: str
|
||||
|
||||
|
||||
class _ApiKeyHTTPServer(http.server.HTTPServer):
|
||||
"""HTTPServer with shutdown helper & self-contained OAuth configuration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
server_address: tuple[str, int],
|
||||
request_handler_class: type[http.server.BaseHTTPRequestHandler],
|
||||
*,
|
||||
codex_home: str,
|
||||
verbose: bool = False,
|
||||
) -> None:
|
||||
super().__init__(server_address, request_handler_class, bind_and_activate=True)
|
||||
|
||||
self.exit_code = 1
|
||||
self.codex_home = codex_home
|
||||
self.verbose: bool = verbose
|
||||
|
||||
self.issuer: str = DEFAULT_ISSUER
|
||||
self.client_id: str = DEFAULT_CLIENT_ID
|
||||
port = server_address[1]
|
||||
self.redirect_uri: str = f"http://localhost:{port}/auth/callback"
|
||||
self.pkce: PkceCodes = _generate_pkce()
|
||||
self.state: str = secrets.token_hex(32)
|
||||
|
||||
def auth_url(self) -> str:
|
||||
"""Return fully-formed OpenID authorization URL."""
|
||||
params = {
|
||||
"response_type": "code",
|
||||
"client_id": self.client_id,
|
||||
"redirect_uri": self.redirect_uri,
|
||||
"scope": "openid profile email offline_access",
|
||||
"code_challenge": self.pkce.code_challenge,
|
||||
"code_challenge_method": "S256",
|
||||
"id_token_add_organizations": "true",
|
||||
"state": self.state,
|
||||
}
|
||||
return f"{self.issuer}/oauth/authorize?" + urllib.parse.urlencode(params)
|
||||
|
||||
|
||||
def _generate_pkce() -> PkceCodes:
|
||||
"""Generate PKCE *code_verifier* and *code_challenge* (S256)."""
|
||||
code_verifier = secrets.token_hex(64)
|
||||
digest = hashlib.sha256(code_verifier.encode()).digest()
|
||||
code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
|
||||
return PkceCodes(code_verifier, code_challenge)
|
||||
|
||||
|
||||
def eprint(*args, **kwargs) -> None:
|
||||
print(*args, file=sys.stderr, **kwargs)
|
||||
|
||||
|
||||
LOGIN_SUCCESS_HTML = """<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>Sign into Codex CLI</title>
|
||||
<link rel="icon" href='data:image/svg+xml,%3Csvg xmlns="http://www.w3.org/2000/svg" width="32" height="32" fill="none" viewBox="0 0 32 32"%3E%3Cpath stroke="%23000" stroke-linecap="round" stroke-width="2.484" d="M22.356 19.797H17.17M9.662 12.29l1.979 3.576a.511.511 0 0 1-.005.504l-1.974 3.409M30.758 16c0 8.15-6.607 14.758-14.758 14.758-8.15 0-14.758-6.607-14.758-14.758C1.242 7.85 7.85 1.242 16 1.242c8.15 0 14.758 6.608 14.758 14.758Z"/%3E%3C/svg%3E' type="image/svg+xml">
|
||||
<style>
|
||||
.container {
|
||||
margin: auto;
|
||||
height: 100%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
position: relative;
|
||||
background: white;
|
||||
font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
|
||||
}
|
||||
.inner-container {
|
||||
width: 400px;
|
||||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 20px;
|
||||
display: inline-flex;
|
||||
}
|
||||
.content {
|
||||
align-self: stretch;
|
||||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 20px;
|
||||
display: flex;
|
||||
}
|
||||
.svg-wrapper {
|
||||
position: relative;
|
||||
}
|
||||
.title {
|
||||
text-align: center;
|
||||
color: var(--text-primary, #0D0D0D);
|
||||
font-size: 28px;
|
||||
font-weight: 400;
|
||||
line-height: 36.40px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.setup-box {
|
||||
width: 600px;
|
||||
padding: 16px 20px;
|
||||
background: var(--bg-primary, white);
|
||||
box-shadow: 0px 4px 16px rgba(0, 0, 0, 0.05);
|
||||
border-radius: 16px;
|
||||
outline: 1px var(--border-default, rgba(13, 13, 13, 0.10)) solid;
|
||||
outline-offset: -1px;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
display: inline-flex;
|
||||
}
|
||||
.setup-content {
|
||||
flex: 1 1 0;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 24px;
|
||||
display: flex;
|
||||
}
|
||||
.setup-text {
|
||||
flex: 1 1 0;
|
||||
flex-direction: column;
|
||||
justify-content: flex-start;
|
||||
align-items: flex-start;
|
||||
gap: 4px;
|
||||
display: inline-flex;
|
||||
}
|
||||
.setup-title {
|
||||
align-self: stretch;
|
||||
color: var(--text-primary, #0D0D0D);
|
||||
font-size: 14px;
|
||||
font-weight: 510;
|
||||
line-height: 20px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.setup-description {
|
||||
align-self: stretch;
|
||||
color: var(--text-secondary, #5D5D5D);
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.redirect-box {
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
display: flex;
|
||||
}
|
||||
.close-button,
|
||||
.redirect-button {
|
||||
height: 28px;
|
||||
padding: 8px 16px;
|
||||
background: var(--interactive-bg-primary-default, #0D0D0D);
|
||||
border-radius: 999px;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
display: flex;
|
||||
}
|
||||
.close-button,
|
||||
.redirect-text {
|
||||
color: var(--interactive-label-primary-default, white);
|
||||
font-size: 14px;
|
||||
font-weight: 510;
|
||||
line-height: 20px;
|
||||
word-wrap: break-word;
|
||||
text-decoration: none;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="inner-container">
|
||||
<div class="content">
|
||||
<div data-svg-wrapper class="svg-wrapper">
|
||||
<svg width="56" height="56" viewBox="0 0 56 56" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.6665 28.0003C4.6665 15.1137 15.1132 4.66699 27.9998 4.66699C40.8865 4.66699 51.3332 15.1137 51.3332 28.0003C51.3332 40.887 40.8865 51.3337 27.9998 51.3337C15.1132 51.3337 4.6665 40.887 4.6665 28.0003ZM37.5093 18.5088C36.4554 17.7672 34.9999 18.0203 34.2583 19.0742L24.8508 32.4427L20.9764 28.1808C20.1095 27.2272 18.6338 27.1569 17.6803 28.0238C16.7267 28.8906 16.6565 30.3664 17.5233 31.3199L23.3566 37.7366C23.833 38.2606 24.5216 38.5399 25.2284 38.4958C25.9353 38.4517 26.5838 38.089 26.9914 37.5098L38.0747 21.7598C38.8163 20.7059 38.5632 19.2504 37.5093 18.5088Z" fill="var(--green-400, #04B84C)"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="title">Signed in to Codex CLI</div>
|
||||
</div>
|
||||
<div class="close-box" style="display: none;">
|
||||
<div class="setup-description">You may now close this page</div>
|
||||
</div>
|
||||
<div class="setup-box" style="display: none;">
|
||||
<div class="setup-content">
|
||||
<div class="setup-text">
|
||||
<div class="setup-title">Finish setting up your API organization</div>
|
||||
<div class="setup-description">Add a payment method to use your organization.</div>
|
||||
</div>
|
||||
<div class="redirect-box">
|
||||
<div data-hasendicon="false" data-hasstarticon="false" data-ishovered="false" data-isinactive="false" data-ispressed="false" data-size="large" data-type="primary" class="redirect-button">
|
||||
<div class="redirect-text">Redirecting in 3s...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
(function () {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
const needsSetup = params.get('needs_setup') === 'true';
|
||||
const platformUrl = params.get('platform_url') || 'https://platform.openai.com';
|
||||
const orgId = params.get('org_id');
|
||||
const projectId = params.get('project_id');
|
||||
const planType = params.get('plan_type');
|
||||
const idToken = params.get('id_token');
|
||||
// Show different message and optional redirect when setup is required
|
||||
if (needsSetup) {
|
||||
const setupBox = document.querySelector('.setup-box');
|
||||
setupBox.style.display = 'flex';
|
||||
const redirectUrlObj = new URL('/org-setup', platformUrl);
|
||||
redirectUrlObj.searchParams.set('p', planType);
|
||||
redirectUrlObj.searchParams.set('t', idToken);
|
||||
redirectUrlObj.searchParams.set('with_org', orgId);
|
||||
redirectUrlObj.searchParams.set('project_id', projectId);
|
||||
const redirectUrl = redirectUrlObj.toString();
|
||||
const message = document.querySelector('.redirect-text');
|
||||
let countdown = 3;
|
||||
function tick() {
|
||||
message.textContent =
|
||||
'Redirecting in ' + countdown + 's…';
|
||||
if (countdown === 0) {
|
||||
window.location.replace(redirectUrl);
|
||||
} else {
|
||||
countdown -= 1;
|
||||
setTimeout(tick, 1000);
|
||||
}
|
||||
}
|
||||
tick();
|
||||
} else {
|
||||
const closeBox = document.querySelector('.close-box');
|
||||
closeBox.style.display = 'flex';
|
||||
}
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>"""
|
||||
|
||||
# Unconditionally call `main()` instead of gating it behind
|
||||
# `if __name__ == "__main__"` because this script is either:
|
||||
#
|
||||
# - invoked as a string passed to `python3 -c`
|
||||
# - run via `python3 login_with_chatgpt.py` for testing as part of local
|
||||
# development
|
||||
main()
|
||||
The TUI crate also depends on `codex-login`:
```
@@ -22,6 +22,7 @@ codex-ansi-escape = { path = "../ansi-escape" }
codex-core = { path = "../core" }
codex-common = { path = "../common", features = ["cli", "elapsed"] }
codex-linux-sandbox = { path = "../linux-sandbox" }
codex-login = { path = "../login" }
color-eyre = "0.6.3"
crossterm = { version = "0.28.1", features = ["bracketed-paste"] }
image = { version = "^0.25.6", default-features = false, features = ["jpeg"] }
```
The TUI's `App` gains a `Login` state alongside the existing Git-warning screen:
```
@@ -3,11 +3,11 @@ use crate::app_event_sender::AppEventSender;
use crate::chatwidget::ChatWidget;
use crate::git_warning_screen::GitWarningOutcome;
use crate::git_warning_screen::GitWarningScreen;
use crate::login_screen::LoginScreen;
use crate::mouse_capture::MouseCapture;
use crate::scroll_event_helper::ScrollEventHelper;
use crate::slash_command::SlashCommand;
use crate::tui;
// used by ChatWidgetArgs
use codex_core::config::Config;
use codex_core::protocol::Event;
use codex_core::protocol::Op;
@@ -29,6 +29,8 @@ enum AppState<'a> {
        /// `AppState`.
        widget: Box<ChatWidget<'a>>,
    },
    /// The login screen for the OpenAI provider.
    Login { screen: LoginScreen },
    /// The start-up warning that recommends running codex inside a Git repo.
    GitWarning { screen: GitWarningScreen },
}
@@ -56,6 +58,7 @@ impl<'a> App<'a> {
    pub(crate) fn new(
        config: Config,
        initial_prompt: Option<String>,
        show_login_screen: bool,
        show_git_warning: bool,
        initial_images: Vec<std::path::PathBuf>,
    ) -> Self {
@@ -113,7 +116,18 @@ impl<'a> App<'a> {
            });
        }

        let (app_state, chat_args) = if show_git_warning {
        let (app_state, chat_args) = if show_login_screen {
            (
                AppState::Login {
                    screen: LoginScreen::new(app_event_tx.clone(), config.codex_home.clone()),
                },
                Some(ChatWidgetArgs {
                    config,
                    initial_prompt,
                    initial_images,
                }),
            )
        } else if show_git_warning {
            (
                AppState::GitWarning {
                    screen: GitWarningScreen::new(),
@@ -175,7 +189,7 @@ impl<'a> App<'a> {
            AppState::Chat { widget } => {
                widget.submit_op(Op::Interrupt);
            }
            AppState::GitWarning { .. } => {
            AppState::Login { .. } | AppState::GitWarning { .. } => {
                // No-op.
            }
        }
@@ -203,16 +217,16 @@ impl<'a> App<'a> {
            }
            AppEvent::CodexOp(op) => match &mut self.app_state {
                AppState::Chat { widget } => widget.submit_op(op),
                AppState::GitWarning { .. } => {}
                AppState::Login { .. } | AppState::GitWarning { .. } => {}
            },
            AppEvent::LatestLog(line) => match &mut self.app_state {
                AppState::Chat { widget } => widget.update_latest_log(line),
                AppState::GitWarning { .. } => {}
                AppState::Login { .. } | AppState::GitWarning { .. } => {}
            },
            AppEvent::DispatchCommand(command) => match command {
                SlashCommand::Clear => match &mut self.app_state {
                    AppState::Chat { widget } => widget.clear_conversation_history(),
                    AppState::GitWarning { .. } => {}
                    AppState::Login { .. } | AppState::GitWarning { .. } => {}
                },
                SlashCommand::ToggleMouseMode => {
                    if let Err(e) = mouse_capture.toggle() {
@@ -235,6 +249,9 @@ impl<'a> App<'a> {
            AppState::Chat { widget } => {
                terminal.draw(|frame| frame.render_widget_ref(&**widget, frame.area()))?;
            }
            AppState::Login { screen } => {
                terminal.draw(|frame| frame.render_widget_ref(&*screen, frame.area()))?;
            }
            AppState::GitWarning { screen } => {
                terminal.draw(|frame| frame.render_widget_ref(&*screen, frame.area()))?;
            }
@@ -249,6 +266,7 @@ impl<'a> App<'a> {
            AppState::Chat { widget } => {
                widget.handle_key_event(key_event);
            }
            AppState::Login { screen } => screen.handle_key_event(key_event),
            AppState::GitWarning { screen } => match screen.handle_key_event(key_event) {
                GitWarningOutcome::Continue => {
                    // User accepted – switch to chat view.
@@ -279,14 +297,14 @@ impl<'a> App<'a> {
    fn dispatch_scroll_event(&mut self, scroll_delta: i32) {
        match &mut self.app_state {
            AppState::Chat { widget } => widget.handle_scroll_delta(scroll_delta),
            AppState::GitWarning { .. } => {}
            AppState::Login { .. } | AppState::GitWarning { .. } => {}
        }
    }

    fn dispatch_codex_event(&mut self, event: Event) {
        match &mut self.app_state {
            AppState::Chat { widget } => widget.handle_codex_event(event),
            AppState::GitWarning { .. } => {}
            AppState::Login { .. } | AppState::GitWarning { .. } => {}
        }
    }
}
```
The TUI's `lib.rs` decides whether to show the login screen before starting the app:
```
@@ -5,9 +5,13 @@
use app::App;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::openai_api_key::OPENAI_API_KEY_ENV_VAR;
use codex_core::openai_api_key::get_openai_api_key;
use codex_core::openai_api_key::set_openai_api_key;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_core::util::is_inside_git_repo;
use codex_login::try_read_openai_api_key;
use log_layer::TuiLogLayer;
use std::fs::OpenOptions;
use std::path::PathBuf;
@@ -28,6 +32,7 @@ mod exec_command;
mod git_warning_screen;
mod history_cell;
mod log_layer;
mod login_screen;
mod markdown;
mod mouse_capture;
mod scroll_event_helper;
@@ -124,13 +129,15 @@ pub fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> std::io::
        .with(tui_layer)
        .try_init();

    let show_login_screen = should_show_login_screen(&config);

    // Determine whether we need to display the "not a git repo" warning
    // modal. The flag is shown when the current working directory is *not*
    // inside a Git repository **and** the user did *not* pass the
    // `--allow-no-git-exec` flag.
    let show_git_warning = !cli.skip_git_repo_check && !is_inside_git_repo(&config);

    try_run_ratatui_app(cli, config, show_git_warning, log_rx);
    try_run_ratatui_app(cli, config, show_login_screen, show_git_warning, log_rx);
    Ok(())
}

@@ -141,10 +148,11 @@ pub fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> std::io::
fn try_run_ratatui_app(
    cli: Cli,
    config: Config,
    show_login_screen: bool,
    show_git_warning: bool,
    log_rx: tokio::sync::mpsc::UnboundedReceiver<String>,
) {
    if let Err(report) = run_ratatui_app(cli, config, show_git_warning, log_rx) {
    if let Err(report) = run_ratatui_app(cli, config, show_login_screen, show_git_warning, log_rx) {
        eprintln!("Error: {report:?}");
    }
}
@@ -152,6 +160,7 @@ fn try_run_ratatui_app(
fn run_ratatui_app(
    cli: Cli,
    config: Config,
    show_login_screen: bool,
    show_git_warning: bool,
    mut log_rx: tokio::sync::mpsc::UnboundedReceiver<String>,
) -> color_eyre::Result<()> {
@@ -167,7 +176,13 @@ fn run_ratatui_app(
    terminal.clear()?;

    let Cli { prompt, images, .. } = cli;
    let mut app = App::new(config.clone(), prompt, show_git_warning, images);
    let mut app = App::new(
        config.clone(),
        prompt,
        show_login_screen,
        show_git_warning,
        images,
    );

    // Bridge log receiver into the AppEvent channel so latest log lines update the UI.
    {
@@ -197,3 +212,38 @@ fn restore() {
    );
}
}

#[allow(clippy::unwrap_used)]
fn should_show_login_screen(config: &Config) -> bool {
    if is_in_need_of_openai_api_key(config) {
        // Reading the OpenAI API key is an async operation because it may need
        // to refresh the token. Block on it.
        let codex_home = config.codex_home.clone();
        let (tx, rx) = tokio::sync::oneshot::channel();
        tokio::spawn(async move {
            match try_read_openai_api_key(&codex_home).await {
                Ok(openai_api_key) => {
                    set_openai_api_key(openai_api_key);
                    tx.send(false).unwrap();
                }
                Err(_) => {
                    tx.send(true).unwrap();
                }
            }
        });
        // TODO(mbolin): Impose some sort of timeout.
        tokio::task::block_in_place(|| rx.blocking_recv()).unwrap()
    } else {
        false
    }
}

fn is_in_need_of_openai_api_key(config: &Config) -> bool {
    let is_using_openai_key = config
        .model_provider
        .env_key
        .as_ref()
        .map(|s| s == OPENAI_API_KEY_ENV_VAR)
        .unwrap_or(false);
    is_using_openai_key && get_openai_api_key().is_none()
}
```
codex-rs/tui/src/login_screen.rs (new file, 46 lines):
```
use std::path::PathBuf;

use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::widgets::Paragraph;
use ratatui::widgets::Widget as _;
use ratatui::widgets::WidgetRef;

use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;

pub(crate) struct LoginScreen {
    app_event_tx: AppEventSender,

    /// Use this with login_with_chatgpt() in login/src/lib.rs and, if
    /// successful, update the in-memory config via
    /// codex_core::openai_api_key::set_openai_api_key().
    #[allow(dead_code)]
    codex_home: PathBuf,
}

impl LoginScreen {
    pub(crate) fn new(app_event_tx: AppEventSender, codex_home: PathBuf) -> Self {
        Self {
            app_event_tx,
            codex_home,
        }
    }

    pub(crate) fn handle_key_event(&mut self, key_event: KeyEvent) {
        if let KeyCode::Char('q') = key_event.code {
            self.app_event_tx.send(AppEvent::ExitRequest);
        }
    }
}

impl WidgetRef for &LoginScreen {
    fn render_ref(&self, area: Rect, buf: &mut Buffer) {
        let text = Paragraph::new(
            "Login using `codex login` and then run this command again. 'q' to quit.",
        );
        text.render(area, buf);
    }
}
```