feat: honor OPENAI_BASE_URL for the built-in openai provider (#1487)
Some users have proxies or other setups where they are ultimately hitting OpenAI endpoints, but need a custom `base_url` rather than the default value of `"https://api.openai.com/v1"`. This PR makes it possible to override the `base_url` for the `openai` provider via the `OPENAI_BASE_URL` environment variable.
This commit is contained in:
@@ -94,15 +94,15 @@ env_http_headers = { "X-Example-Features": "EXAMPLE_FEATURES" }
|
|||||||
|
|
||||||
## model_provider
|
## model_provider
|
||||||
|
|
||||||
-Identifies which provider to use from the `model_providers` map. Defaults to `"openai"`.
+Identifies which provider to use from the `model_providers` map. Defaults to `"openai"`. You can override the `base_url` for the built-in `openai` provider via the `OPENAI_BASE_URL` environment variable.
|
|
||||||
Note that if you override `model_provider`, then you likely want to override
|
Note that if you override `model_provider`, then you likely want to override
|
||||||
`model`, as well. For example, if you are running ollama with Mistral locally,
|
`model`, as well. For example, if you are running ollama with Mistral locally,
|
||||||
then you would need to add the following to your config in addition to the new entry in the `model_providers` map:
|
then you would need to add the following to your config in addition to the new entry in the `model_providers` map:
|
||||||
|
|
||||||
 ```toml
-model = "mistral"
 model_provider = "ollama"
+model = "mistral"
 ```
|
||||||
|
|
||||||
## approval_policy
|
## approval_policy
|
||||||
|
|||||||
@@ -176,7 +176,15 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
|
|||||||
"openai",
|
"openai",
|
||||||
P {
|
P {
|
||||||
name: "OpenAI".into(),
|
name: "OpenAI".into(),
|
||||||
-            base_url: "https://api.openai.com/v1".into(),
+            // Allow users to override the default OpenAI endpoint by
+            // exporting `OPENAI_BASE_URL`. This is useful when pointing
+            // Codex at a proxy, mock server, or Azure-style deployment
+            // without requiring a full TOML override for the built-in
+            // OpenAI provider.
+            base_url: std::env::var("OPENAI_BASE_URL")
+                .ok()
+                .filter(|v| !v.trim().is_empty())
+                .unwrap_or_else(|| "https://api.openai.com/v1".to_string()),
env_key: Some("OPENAI_API_KEY".into()),
|
env_key: Some("OPENAI_API_KEY".into()),
|
||||||
env_key_instructions: Some("Create an API key (https://platform.openai.com) and export it as an environment variable.".into()),
|
env_key_instructions: Some("Create an API key (https://platform.openai.com) and export it as an environment variable.".into()),
|
||||||
wire_api: WireApi::Responses,
|
wire_api: WireApi::Responses,
|
||||||
|
|||||||
Reference in New Issue
Block a user