feat: read model_provider and model_providers from config.toml (#853)

This is the first step toward supporting other model providers in the Rust
CLI. Specifically, this PR adds new entries to `Config` and
`ConfigOverrides` for specifying a `ModelProviderInfo`, the basic
configuration needed to talk to an LLM provider. This PR does not get us
all the way there yet because `client.rs` still unconditionally appends
`/responses` to the URL and expects the endpoint to support the OpenAI
Responses API. Will fix that next!
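For reference, a minimal sketch of what the new `config.toml` entries might look like. The exact keys and the shape of a provider entry are assumptions for illustration here (the PR description does not spell them out), including the hypothetical "openrouter" entry:

# Select the active provider; "openrouter" refers to the entry below (assumed).
model_provider = "openrouter"

[model_providers.openrouter]
name = "OpenRouter"                       # display name (assumed field)
base_url = "https://openrouter.ai/api/v1" # API base URL (assumed field)
env_key = "OPENROUTER_API_KEY"            # env var holding the API key (assumed field)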
Author: Michael Bolin
Date: 2025-05-07 17:38:28 -07:00 (committed by GitHub)
Parent: cfe50c7107
Commit: 86022f097e
12 changed files with 208 additions and 30 deletions

@@ -11,6 +11,8 @@ use mcp_types::CallToolResult;
 use serde::Deserialize;
 use serde::Serialize;
+
+use crate::model_provider_info::ModelProviderInfo;
 
 /// Submission Queue Entry - requests from user
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct Submission {
@@ -27,6 +29,9 @@ pub struct Submission {
 pub enum Op {
     /// Configure the model session.
     ConfigureSession {
+        /// Provider identifier ("openai", "openrouter", ...).
+        provider: ModelProviderInfo,
+
         /// If not specified, server will use its default model.
         model: String,
         /// Model instructions
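The diff above only shows `ModelProviderInfo` being threaded through `Op::ConfigureSession`; the struct's definition is not included in this excerpt. A rough Rust sketch of the shape such a type might have, matching the TOML example earlier; the field names are assumptions, not necessarily the exact struct in this PR:

use serde::Deserialize;
use serde::Serialize;

/// Sketch of a provider entry (illustrative; fields assumed, not confirmed).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ModelProviderInfo {
    /// Human-readable provider name, e.g. "OpenRouter".
    pub name: String,
    /// Base URL of the provider's API; `client.rs` currently appends
    /// `/responses` to this when building the request URL.
    pub base_url: String,
    /// Name of the environment variable holding the API key, if any.
    pub env_key: Option<String>,
}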