llmx/codex-rs/core/src/model_provider_info.rs
Michael Bolin e924070cee feat: support the chat completions API in the Rust CLI (#862)
This is a substantial PR to add support for the chat completions API,
which in turn makes it possible to use non-OpenAI model providers (just
like in the TypeScript CLI):

* It moves a number of structs from `client.rs` to `client_common.rs` so
they can be shared.
* It introduces support for the chat completions API in
`chat_completions.rs`.
* It updates `ModelProviderInfo` so that `env_key` is `Option<String>`
instead of `String` (e.g., Ollama requires no API key) and adds a
`wire_api` field
* It updates `client.rs` to choose between `stream_responses()` and
`stream_chat_completions()` based on the `wire_api` for the
`ModelProviderInfo` (sketched after this list)
* It updates the `exec` and TUI CLIs to no longer fail if the
`OPENAI_API_KEY` environment variable is not set
* It updates the TUI so that `EventMsg::Error` is displayed more
prominently when it occurs, particularly now that it is important to
alert users to the `CodexErr::EnvVar` variant.
* `CodexErr::EnvVar` was updated to include an optional `instructions`
field so we can preserve the behavior where we direct users to
https://platform.openai.com if `OPENAI_API_KEY` is not set.
* Cleaned up the "welcome message" in the TUI to ensure the model
provider is displayed.
* Updated the docs in `codex-rs/README.md`.
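
A minimal sketch of the `wire_api` dispatch described above; only the
two streaming function names and `WireApi` come from this PR, while the
client shape and the placeholder types are illustrative:

```rust
use codex_core::model_provider_info::WireApi;

// Placeholder types standing in for the real prompt/stream types that
// this PR moves into `client_common.rs`.
struct Prompt;
struct ResponseStream;

struct ModelClient {
    wire_api: WireApi,
}

impl ModelClient {
    /// Pick the streaming implementation that matches the provider's
    /// declared wire protocol.
    async fn stream(&self, prompt: Prompt) -> ResponseStream {
        match self.wire_api {
            WireApi::Responses => self.stream_responses(prompt).await,
            WireApi::Chat => self.stream_chat_completions(prompt).await,
        }
    }

    async fn stream_responses(&self, _prompt: Prompt) -> ResponseStream {
        // ...POST to `{base_url}/responses` and forward SSE events...
        ResponseStream
    }

    async fn stream_chat_completions(&self, _prompt: Prompt) -> ResponseStream {
        // ...POST to `{base_url}/chat/completions` and forward deltas...
        ResponseStream
    }
}
```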

To exercise the chat completions API with OpenAI models, I added the
following to my `config.toml`:

```toml
model = "gpt-4o"
model_provider = "openai-chat-completions"

[model_providers.openai-chat-completions]
name = "OpenAI using Chat Completions"
base_url = "https://api.openai.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"
```
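
That table deserializes directly into the `ModelProviderInfo` struct
shown further down this page; a minimal sketch, assuming the `toml`
crate and that the `codex_core` import path below is correct:

```rust
use codex_core::model_provider_info::{ModelProviderInfo, WireApi};

fn main() {
    // `env_key_instructions` is omitted here; serde fills `Option`
    // fields in as `None` when the key is absent from the table.
    let provider: ModelProviderInfo = toml::from_str(
        r#"
            name = "OpenAI using Chat Completions"
            base_url = "https://api.openai.com/v1"
            env_key = "OPENAI_API_KEY"
            wire_api = "chat"
        "#,
    )
    .expect("valid provider table");
    assert_eq!(provider.wire_api, WireApi::Chat);
}
```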

To test a non-OpenAI provider, I installed Ollama with Mistral locally
on my Mac because ChatGPT said that would be a good match for my
hardware:

```shell
brew install ollama
ollama serve
ollama pull mistral
```

Then I added the following to my `~/.codex/config.toml`:

```toml
model = "mistral"
model_provider = "ollama"
```

Note this code could certainly use more test coverage, but I want to get
this in so folks can start playing with it.

For reference, I believe https://github.com/openai/codex/pull/247 was
roughly the comparable PR on the TypeScript side.
2025-05-08 21:46:06 -07:00

164 lines · 5.5 KiB · Rust

//! Registry of model providers supported by Codex.
//!
//! Providers can be defined in two places:
//! 1. Built-in defaults compiled into the binary so Codex works out-of-the-box.
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
//!    key. These override or extend the defaults at runtime.

use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::env::VarError;

use crate::error::EnvVarError;

/// Wire protocol that the provider speaks. Most third-party services only
/// implement the classic OpenAI Chat Completions JSON schema, whereas OpenAI
/// itself (and a handful of others) additionally expose the more modern
/// *Responses* API. The two protocols use different request/response shapes
/// and *cannot* be auto-detected at runtime, therefore each provider entry
/// must declare which one it expects.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum WireApi {
    /// The experimental “Responses” API exposed by OpenAI at `/v1/responses`.
    #[default]
    Responses,

    /// Regular Chat Completions compatible with `/v1/chat/completions`.
    Chat,
}

/// Serializable representation of a provider definition.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ModelProviderInfo {
    /// Friendly display name.
    pub name: String,

    /// Base URL for the provider's OpenAI-compatible API.
    pub base_url: String,

    /// Environment variable that stores the user's API key for this provider.
    pub env_key: Option<String>,

    /// Optional instructions to help the user get a valid value for the
    /// variable and set it.
    pub env_key_instructions: Option<String>,

    /// Which wire protocol this provider expects.
    pub wire_api: WireApi,
}

impl ModelProviderInfo {
    /// If `env_key` is Some, returns the API key for this provider if present
    /// (and non-empty) in the environment. If `env_key` is required but
    /// cannot be found, returns an error.
    pub fn api_key(&self) -> crate::error::Result<Option<String>> {
        match &self.env_key {
            Some(env_key) => std::env::var(env_key)
                .and_then(|v| {
                    // Treat a variable that is set but empty or whitespace
                    // the same as one that is not set at all.
                    if v.trim().is_empty() {
                        Err(VarError::NotPresent)
                    } else {
                        Ok(Some(v))
                    }
                })
                .map_err(|_| {
                    crate::error::CodexErr::EnvVar(EnvVarError {
                        var: env_key.clone(),
                        instructions: self.env_key_instructions.clone(),
                    })
                }),
            None => Ok(None),
        }
    }
}

/// Built-in default provider list.
pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
    use ModelProviderInfo as P;

    [
        (
            "openai",
            P {
                name: "OpenAI".into(),
                base_url: "https://api.openai.com/v1".into(),
                env_key: Some("OPENAI_API_KEY".into()),
                env_key_instructions: Some(
                    "Create an API key (https://platform.openai.com) and export it as an environment variable.".into(),
                ),
                wire_api: WireApi::Responses,
            },
        ),
        (
            "openrouter",
            P {
                name: "OpenRouter".into(),
                base_url: "https://openrouter.ai/api/v1".into(),
                env_key: Some("OPENROUTER_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "gemini",
            P {
                name: "Gemini".into(),
                base_url: "https://generativelanguage.googleapis.com/v1beta/openai".into(),
                env_key: Some("GEMINI_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "ollama",
            P {
                name: "Ollama".into(),
                base_url: "http://localhost:11434/v1".into(),
                env_key: None,
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "mistral",
            P {
                name: "Mistral".into(),
                base_url: "https://api.mistral.ai/v1".into(),
                env_key: Some("MISTRAL_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "deepseek",
            P {
                name: "DeepSeek".into(),
                base_url: "https://api.deepseek.com".into(),
                env_key: Some("DEEPSEEK_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "xai",
            P {
                name: "xAI".into(),
                base_url: "https://api.x.ai/v1".into(),
                env_key: Some("XAI_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
        (
            "groq",
            P {
                name: "Groq".into(),
                base_url: "https://api.groq.com/openai/v1".into(),
                env_key: Some("GROQ_API_KEY".into()),
                env_key_instructions: None,
                wire_api: WireApi::Chat,
            },
        ),
    ]
    .into_iter()
    .map(|(k, v)| (k.to_string(), v))
    .collect()
}
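
Not part of the file, but a minimal usage sketch of the registry above;
the `codex_core` import path is an assumption, and the inserted entry
mirrors the `config.toml` override from the commit message:

```rust
use codex_core::model_provider_info::{
    built_in_model_providers, ModelProviderInfo, WireApi,
};

fn main() {
    let mut providers = built_in_model_providers();

    // A user-defined entry from `~/.codex/config.toml` overrides or
    // extends the defaults; here we add the chat-completions flavor of
    // the OpenAI provider.
    providers.insert(
        "openai-chat-completions".to_string(),
        ModelProviderInfo {
            name: "OpenAI using Chat Completions".into(),
            base_url: "https://api.openai.com/v1".into(),
            env_key: Some("OPENAI_API_KEY".into()),
            env_key_instructions: None,
            wire_api: WireApi::Chat,
        },
    );

    // Ollama declares no `env_key`, so `api_key()` succeeds with `None`
    // instead of erroring when no variable is set.
    let ollama = &providers["ollama"];
    assert!(matches!(ollama.api_key(), Ok(None)));
}
```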