feat: support the chat completions API in the Rust CLI (#862)
This is a substantial PR to add support for the chat completions API, which in turn makes it possible to use non-OpenAI model providers (just like in the TypeScript CLI):

* It moves a number of structs from `client.rs` to `client_common.rs` so they can be shared.
* It introduces support for the chat completions API in `chat_completions.rs`.
* It updates `ModelProviderInfo` so that `env_key` is `Option<String>` instead of `String` (needed for providers such as ollama) and adds a `wire_api` field.
* It updates `client.rs` to choose between `stream_responses()` and `stream_chat_completions()` based on the `wire_api` of the `ModelProviderInfo` (see the first sketch after this message).
* It updates the `exec` and TUI CLIs to no longer fail if the `OPENAI_API_KEY` environment variable is not set.
* It updates the TUI so that `EventMsg::Error` is displayed more prominently when it occurs, which matters now that users must be alerted to the `CodexErr::EnvVar` variant.
* `CodexErr::EnvVar` now includes an optional `instructions` field so we can preserve the behavior of directing users to https://platform.openai.com if `OPENAI_API_KEY` is not set (see the second sketch below).
* Cleaned up the "welcome message" in the TUI to ensure the model provider is displayed.
* Updated the docs in `codex-rs/README.md`.

To exercise the chat completions API with OpenAI models, I added the following to my `config.toml`:

```toml
model = "gpt-4o"
model_provider = "openai-chat-completions"

[model_providers.openai-chat-completions]
name = "OpenAI using Chat Completions"
base_url = "https://api.openai.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"
```

To test a non-OpenAI provider, I installed ollama with mistral locally on my Mac because ChatGPT said that would be a good match for my hardware:

```shell
brew install ollama
ollama serve
ollama pull mistral
```

Then I added the following to my `~/.codex/config.toml`:

```toml
model = "mistral"
model_provider = "ollama"
```

Note this code could certainly use more test coverage, but I want to get it in so folks can start playing with it. For reference, I believe https://github.com/openai/codex/pull/247 was roughly the comparable PR on the TypeScript side.
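For orientation, here is a minimal sketch of how the optional `env_key` and the `wire_api` dispatch fit together. Only `ModelProviderInfo`, `env_key`, `wire_api`, `stream_responses()`, and `stream_chat_completions()` are named in this PR; the `WireApi` enum name, the stub bodies, and the endpoint paths are illustrative assumptions, not the actual implementation:

```rust
/// Hypothetical enum for the `wire_api` config value; the PR only says the
/// field exists and that "chat" selects the chat completions API.
#[derive(Clone, Copy)]
enum WireApi {
    Responses, // OpenAI Responses API (the previous behavior)
    Chat,      // chat completions API (works with non-OpenAI providers)
}

/// Sketch of the updated provider record. `env_key` is `Option<String>`
/// so key-less providers such as ollama can omit it.
struct ModelProviderInfo {
    name: String,
    base_url: String,
    env_key: Option<String>,
    wire_api: WireApi,
}

impl ModelProviderInfo {
    /// Mirrors the dispatch described for `client.rs`: pick the streaming
    /// path based on `wire_api`. The stubs below stand in for the real
    /// `stream_responses()` / `stream_chat_completions()`.
    fn stream(&self, prompt: &str) -> String {
        match self.wire_api {
            WireApi::Responses => stream_responses(&self.base_url, prompt),
            WireApi::Chat => stream_chat_completions(&self.base_url, prompt),
        }
    }
}

// Stubs: the real functions stream SSE events from the provider.
fn stream_responses(base_url: &str, _prompt: &str) -> String {
    format!("POST {base_url}/responses")
}

fn stream_chat_completions(base_url: &str, _prompt: &str) -> String {
    format!("POST {base_url}/chat/completions")
}

fn main() {
    // An ollama-style provider: no API key, chat completions wire format.
    let ollama = ModelProviderInfo {
        name: "Ollama".to_string(),
        base_url: "http://localhost:11434/v1".to_string(),
        env_key: None,
        wire_api: WireApi::Chat,
    };
    match &ollama.env_key {
        Some(key_var) => println!("{} reads its API key from ${key_var}", ollama.name),
        None => println!("{} requires no API key", ollama.name),
    }
    println!("{}", ollama.stream("hello"));
}
```

The design point is that the provider record, not the model, decides which wire protocol to speak, so adding a new provider is purely a `config.toml` change.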
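And a sketch of the updated error variant; the variant shape and the `Display` wording here are assumptions based on the description above (an optional `instructions` string preserving the pointer to https://platform.openai.com), not code copied from the PR:

```rust
use std::fmt;

/// Sketch of `CodexErr::EnvVar` with the new optional `instructions` field.
#[derive(Debug)]
enum CodexErr {
    EnvVar {
        var: String,
        instructions: Option<String>,
    },
}

impl fmt::Display for CodexErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            CodexErr::EnvVar { var, instructions } => {
                write!(f, "Missing environment variable: `{var}`.")?;
                if let Some(extra) = instructions {
                    write!(f, " {extra}")?;
                }
                Ok(())
            }
        }
    }
}

fn main() {
    // The TUI can now surface this as an EventMsg::Error instead of
    // exiting before the UI starts.
    let err = CodexErr::EnvVar {
        var: "OPENAI_API_KEY".to_string(),
        instructions: Some(
            "Create an API key (https://platform.openai.com) and export it as an environment variable.".to_string(),
        ),
    };
    println!("{err}");
}
```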
@@ -1,6 +1,7 @@
use codex_core::protocol::Event;
use crossterm::event::KeyEvent;

#[allow(clippy::large_enum_variant)]
pub(crate) enum AppEvent {
    CodexEvent(Event),

@@ -162,12 +162,8 @@ impl ChatWidget<'_> {
    }

    fn submit_welcome_message(&mut self) -> std::result::Result<(), SendError<AppEvent>> {
        self.handle_codex_event(Event {
            id: "welcome".to_string(),
            msg: EventMsg::AgentMessage {
                message: "Welcome to codex!".to_string(),
            },
        })?;
        self.conversation_history.add_welcome_message(&self.config);
        self.request_redraw()?;
        Ok(())
    }

@@ -231,8 +227,6 @@ impl ChatWidget<'_> {
            }
            EventMsg::TaskStarted => {
                self.bottom_pane.set_task_running(true)?;
                self.conversation_history
                    .add_background_event(format!("task {id} started"));
                self.request_redraw()?;
            }
            EventMsg::TaskComplete => {

@@ -240,8 +234,7 @@ impl ChatWidget<'_> {
                self.request_redraw()?;
            }
            EventMsg::Error { message } => {
                self.conversation_history
                    .add_background_event(format!("Error: {message}"));
                self.conversation_history.add_error(message);
                self.bottom_pane.set_task_running(false)?;
            }
            EventMsg::ExecApprovalRequest {

@@ -162,6 +162,10 @@ impl ConversationHistoryWidget {
        self.scroll_position = usize::MAX;
    }

    pub fn add_welcome_message(&mut self, config: &Config) {
        self.add_to_history(HistoryCell::new_welcome_message(config));
    }

    pub fn add_user_message(&mut self, message: String) {
        self.add_to_history(HistoryCell::new_user_prompt(message));
    }

@@ -174,6 +178,10 @@ impl ConversationHistoryWidget {
        self.add_to_history(HistoryCell::new_background_event(message));
    }

    pub fn add_error(&mut self, message: String) {
        self.add_to_history(HistoryCell::new_error_event(message));
    }

    /// Add a pending patch entry (before user approval).
    pub fn add_patch_event(
        &mut self,

@@ -32,6 +32,9 @@ pub(crate) enum PatchEventType {
/// `Vec<Line<'static>>` representation to make it easier to display in a
/// scrollable list.
pub(crate) enum HistoryCell {
    /// Welcome message.
    WelcomeMessage { lines: Vec<Line<'static>> },

    /// Message from the user.
    UserPrompt { lines: Vec<Line<'static>> },

@@ -69,6 +72,9 @@ pub(crate) enum HistoryCell {
    /// Background event
    BackgroundEvent { lines: Vec<Line<'static>> },

    /// Error event from the backend.
    ErrorEvent { lines: Vec<Line<'static>> },

    /// Info describing the newly‑initialized session.
    SessionInfo { lines: Vec<Line<'static>> },

@@ -85,6 +91,31 @@ pub(crate) enum HistoryCell {
const TOOL_CALL_MAX_LINES: usize = 5;

impl HistoryCell {
    pub(crate) fn new_welcome_message(config: &Config) -> Self {
        let mut lines: Vec<Line<'static>> = vec![
            Line::from(vec![
                "OpenAI ".into(),
                "Codex".bold(),
                " (research preview)".dim(),
            ]),
            Line::from(""),
            Line::from("codex session:".magenta().bold()),
        ];

        let entries = vec![
            ("workdir", config.cwd.display().to_string()),
            ("model", config.model.clone()),
            ("provider", config.model_provider_id.clone()),
            ("approval", format!("{:?}", config.approval_policy)),
            ("sandbox", format!("{:?}", config.sandbox_policy)),
        ];
        for (key, value) in entries {
            lines.push(Line::from(vec![format!("{key}: ").bold(), value.into()]));
        }
        lines.push(Line::from(""));
        HistoryCell::WelcomeMessage { lines }
    }

    pub(crate) fn new_user_prompt(message: String) -> Self {
        let mut lines: Vec<Line<'static>> = Vec::new();
        lines.push(Line::from("user".cyan().bold()));

@@ -245,26 +276,26 @@ impl HistoryCell {
        HistoryCell::BackgroundEvent { lines }
    }

    pub(crate) fn new_error_event(message: String) -> Self {
        let lines: Vec<Line<'static>> = vec![
            vec!["ERROR: ".red().bold(), message.into()].into(),
            "".into(),
        ];
        HistoryCell::ErrorEvent { lines }
    }

    pub(crate) fn new_session_info(config: &Config, model: String) -> Self {
        let mut lines: Vec<Line<'static>> = Vec::new();

        lines.push(Line::from("codex session:".magenta().bold()));
        lines.push(Line::from(vec!["↳ model: ".bold(), model.into()]));
        lines.push(Line::from(vec![
            "↳ cwd: ".bold(),
            config.cwd.display().to_string().into(),
        ]));
        lines.push(Line::from(vec![
            "↳ approval: ".bold(),
            format!("{:?}", config.approval_policy).into(),
        ]));
        lines.push(Line::from(vec![
            "↳ sandbox: ".bold(),
            format!("{:?}", config.sandbox_policy).into(),
        ]));
        lines.push(Line::from(""));

        HistoryCell::SessionInfo { lines }
        if config.model == model {
            HistoryCell::SessionInfo { lines: vec![] }
        } else {
            let lines = vec![
                Line::from("model changed:".magenta().bold()),
                Line::from(format!("requested: {}", config.model)),
                Line::from(format!("used: {}", model)),
                Line::from(""),
            ];
            HistoryCell::SessionInfo { lines }
        }
    }

    /// Create a new `PendingPatch` cell that lists the file‑level summary of

@@ -329,9 +360,11 @@ impl HistoryCell {

    pub(crate) fn lines(&self) -> &Vec<Line<'static>> {
        match self {
            HistoryCell::UserPrompt { lines, .. }
            HistoryCell::WelcomeMessage { lines, .. }
            | HistoryCell::UserPrompt { lines, .. }
            | HistoryCell::AgentMessage { lines, .. }
            | HistoryCell::BackgroundEvent { lines, .. }
            | HistoryCell::ErrorEvent { lines, .. }
            | HistoryCell::SessionInfo { lines, .. }
            | HistoryCell::ActiveExecCommand { lines, .. }
            | HistoryCell::CompletedExecCommand { lines, .. }

@@ -33,8 +33,6 @@ mod user_approval_widget;
pub use cli::Cli;

pub fn run_main(cli: Cli) -> std::io::Result<()> {
    assert_env_var_set();

    let (sandbox_policy, approval_policy) = if cli.full_auto {
        (
            Some(SandboxPolicy::new_full_auto_policy()),

@@ -172,20 +170,6 @@ fn run_ratatui_app(
    app_result
}

#[expect(
    clippy::print_stderr,
    reason = "TUI should not have been displayed yet, so we can write to stderr."
)]
fn assert_env_var_set() {
    if std::env::var("OPENAI_API_KEY").is_err() {
        eprintln!("Welcome to codex! It looks like you're missing: `OPENAI_API_KEY`");
        eprintln!(
            "Create an API key (https://platform.openai.com) and export as an environment variable"
        );
        std::process::exit(1);
    }
}

#[expect(
    clippy::print_stderr,
    reason = "TUI should no longer be displayed, so we can write to stderr."