feat: read model_provider and model_providers from config.toml (#853)
This is the first step in supporting other model providers in the Rust CLI. Specifically, this PR adds new entries to `Config` and `ConfigOverrides` for specifying a `ModelProviderInfo`, the basic configuration an LLM provider needs. This PR does not get us all the way there yet: `client.rs` still unconditionally appends `/responses` to the URL and expects the endpoint to support the OpenAI Responses API. Will fix that next!
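As a rough sketch of what these entries enable, `model_provider` in `config.toml` selects one entry out of a `model_providers` table of provider definitions. The struct fields and parsing below are illustrative assumptions (relying on the `serde` and `toml` crates), not the actual `ModelProviderInfo` or `Config` code added by this PR:

```rust
use std::collections::HashMap;

use serde::Deserialize;

/// Stand-in for `ModelProviderInfo`: the basic configuration an LLM provider
/// needs. The field names here are illustrative, not the real definition.
#[derive(Debug, Clone, Deserialize)]
pub struct ModelProviderInfo {
    /// Human-readable provider name.
    pub name: String,
    /// Base URL for the provider's API.
    pub base_url: String,
    /// Environment variable that holds the API key, if any.
    pub env_key: Option<String>,
}

/// Illustrative slice of `config.toml`: `model_provider` picks an entry out of
/// the `model_providers` table.
#[derive(Debug, Deserialize)]
struct ConfigToml {
    model_provider: Option<String>,
    #[serde(default)]
    model_providers: HashMap<String, ModelProviderInfo>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let toml_src = r#"
        model_provider = "openai"

        [model_providers.openai]
        name = "OpenAI"
        base_url = "https://api.openai.com/v1"
        env_key = "OPENAI_API_KEY"
    "#;

    let cfg: ConfigToml = toml::from_str(toml_src)?;

    // Look up the provider the user selected, if any.
    let selected = cfg
        .model_provider
        .as_deref()
        .and_then(|key| cfg.model_providers.get(key));
    println!("selected provider: {selected:?}");
    Ok(())
}
```

The idea is that the provider's base URL and credentials become configuration data; what remains is for `client.rs` to stop hard-coding the OpenAI Responses endpoint, which the description above defers to a follow-up.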
```diff
@@ -7,6 +7,7 @@
 
 mod client;
 pub mod codex;
+pub use codex::Codex;
 pub mod codex_wrapper;
 pub mod config;
 pub mod error;
@@ -18,6 +19,8 @@ pub mod linux;
 mod mcp_connection_manager;
 pub mod mcp_server_config;
 mod mcp_tool_call;
+mod model_provider_info;
+pub use model_provider_info::ModelProviderInfo;
 mod models;
 pub mod protocol;
 mod rollout;
@@ -25,5 +28,3 @@ mod safety;
 mod user_notification;
 pub mod util;
 mod zdr_transcript;
-
-pub use codex::Codex;
```
||||