[MCP] Introduce an experimental official Rust SDK-based MCP client (#4252)

The [official Rust
SDK](57fc428c57)
has come a long way since we started our MCP client implementation
five months ago; today it is considerably more complete than our own
stdio-only implementation.

This PR introduces a new config flag, `experimental_use_rmcp_client`, which,
when enabled, uses a new MCP client powered by the SDK instead of our own.
Set `experimental_use_rmcp_client = true` in `config.toml` to opt in; it defaults to `false`.

To keep this PR simple, I've only implemented the same stdio MCP
functionality we already had; I'll expand on it in future PRs.
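
To illustrate what the flag controls, here is a minimal, hypothetical sketch (the trait and type names below are illustrative stand-ins, not the actual codex-rs API): the flag simply decides which MCP client implementation gets constructed, so everything downstream of the client stays the same.

```rust
/// Minimal sketch only: both client flavors are modeled behind one trait so
/// the rest of the session code does not care which one was chosen.
trait McpClient {
    fn name(&self) -> &'static str;
}

/// Hypothetical stand-in for the existing in-house, stdio-only client.
struct LegacyStdioClient;
impl McpClient for LegacyStdioClient {
    fn name(&self) -> &'static str {
        "legacy-stdio"
    }
}

/// Hypothetical stand-in for the client backed by the official rust-sdk (rmcp).
struct RmcpClient;
impl McpClient for RmcpClient {
    fn name(&self) -> &'static str {
        "rmcp"
    }
}

/// Pick the client implementation based on the config flag added in this PR.
fn make_client(use_experimental_use_rmcp_client: bool) -> Box<dyn McpClient> {
    if use_experimental_use_rmcp_client {
        Box::new(RmcpClient)
    } else {
        Box::new(LegacyStdioClient)
    }
}

fn main() {
    // `false` mirrors the default from `experimental_use_rmcp_client.unwrap_or(false)`.
    let client = make_client(false);
    println!("using MCP client: {}", client.name());
}
```

Keeping the switch behind a single construction point like this is what lets the rmcp-backed client land incrementally without touching call sites.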

---------

Co-authored-by: pakrym-oai <pakrym@openai.com>
Author: Gabriel Peal
Date: 2025-09-26 10:13:37 -07:00
Committed by: GitHub
Parent: ea095e30c1
Commit: e555a36c6a
17 changed files with 1136 additions and 62 deletions


@@ -184,6 +184,10 @@ pub struct Config {
/// If set to `true`, use only the experimental unified exec tool.
pub use_experimental_unified_exec_tool: bool,
/// If set to `true`, use the experimental official Rust MCP client.
/// https://github.com/modelcontextprotocol/rust-sdk
pub use_experimental_use_rmcp_client: bool,
/// Include the `view_image` tool that lets the agent attach a local image path to context.
pub include_view_image_tool: bool,
@@ -693,6 +697,7 @@ pub struct ConfigToml {
pub experimental_use_exec_command_tool: Option<bool>,
pub experimental_use_unified_exec_tool: Option<bool>,
pub experimental_use_rmcp_client: Option<bool>,
pub projects: Option<HashMap<String, ProjectConfig>>,
@@ -1043,6 +1048,7 @@ impl Config {
use_experimental_unified_exec_tool: cfg
.experimental_use_unified_exec_tool
.unwrap_or(false),
use_experimental_use_rmcp_client: cfg.experimental_use_rmcp_client.unwrap_or(false),
include_view_image_tool,
active_profile: active_profile_name,
disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
@@ -1651,6 +1657,7 @@ model_verbosity = "high"
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("o3".to_string()),
disable_paste_burst: false,
@@ -1709,6 +1716,7 @@ model_verbosity = "high"
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("gpt3".to_string()),
disable_paste_burst: false,
@@ -1782,6 +1790,7 @@ model_verbosity = "high"
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("zdr".to_string()),
disable_paste_burst: false,
@@ -1841,6 +1850,7 @@ model_verbosity = "high"
tools_web_search_request: false,
use_experimental_streamable_shell_tool: false,
use_experimental_unified_exec_tool: false,
use_experimental_use_rmcp_client: false,
include_view_image_tool: true,
active_profile: Some("gpt5".to_string()),
disable_paste_burst: false,