From 45624a4d915eea23774bfa499a4426fbf8923534 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?=
Date: Sun, 30 Nov 2025 21:44:05 +0100
Subject: [PATCH] fix: switch MCP server to stdio transport for llmx compatibility
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add wrapper script run-crawl4ai-mcp.sh for stdio transport
- Update config.toml to use stdio instead of SSE
- llmx uses the Streamable HTTP transport, which is incompatible with SSE
- DB and REST still run via docker compose; the MCP server runs on-demand

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .gitignore          |  1 +
 config.toml         | 10 +++++++---
 run-crawl4ai-mcp.sh | 16 ++++++++++++++++
 3 files changed, 24 insertions(+), 3 deletions(-)
 create mode 100755 run-crawl4ai-mcp.sh

diff --git a/.gitignore b/.gitignore
index a4d95c7..ec1f311 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@
 !.gitignore
 !config.toml
 !compose.yaml
+!*.sh
 
 # Allow servers directory and its contents
 !servers/

diff --git a/config.toml b/config.toml
index 778df68..0d59e2e 100644
--- a/config.toml
+++ b/config.toml
@@ -20,6 +20,9 @@ trust_level = "trusted"
 [projects."/home/valknar/bin"]
 trust_level = "trusted"
 
+[projects."/home/valknar/Projects/pivoine.art"]
+trust_level = "trusted"
+
 # ==============================================================================
 # MCP SERVERS CONFIGURATION (10 servers)
 # Last updated: 2025-11-24
@@ -137,8 +140,9 @@ startup_timeout_sec = 20
 [mcp_servers.crawl4ai_rag]
 # Web crawling + RAG with vector search (local Docker stack)
 # Uses BGE embeddings via remote LiteLLM, local PostgreSQL + pgvector
-# Start with: cd ~/.llmx && docker compose up -d
-url = "http://localhost:8051/sse"
+# Start DB first: cd ~/.llmx && docker compose up -d crawl4ai-db crawl4ai-rest
+command = "/home/valknar/.llmx/run-crawl4ai-mcp.sh"
+args = []
 enabled = true
-startup_timeout_sec = 30
+startup_timeout_sec = 60
 tool_timeout_sec = 120

diff --git a/run-crawl4ai-mcp.sh b/run-crawl4ai-mcp.sh
new file mode 100755
index 0000000..06ab51b
--- /dev/null
+++ b/run-crawl4ai-mcp.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Wrapper script to run crawl4ai-rag MCP server with environment variables
+set -a
+source "$(dirname "$0")/.env"
+set +a
+
+exec docker run -i --rm \
+  --network=llmx_default \
+  -e TRANSPORT=stdio \
+  -e SUPABASE_URL=http://crawl4ai-rest:3000 \
+  -e SUPABASE_SERVICE_KEY="$SUPABASE_SERVICE_KEY" \
+  -e OPENAI_API_KEY="$LITELLM_API_KEY" \
+  -e EMBEDDING_API_BASE=https://llm.ai.pivoine.art/v1 \
+  -e EMBEDDING_MODEL=bge-large-en-v1.5 \
+  -e EMBEDDING_DIMENSION=1024 \
+  llmx-crawl4ai-rag
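
Note (not part of the patch): a quick way to smoke-test the new stdio wrapper outside llmx is to pipe a single MCP initialize request into it. This is a minimal sketch, assuming .env is populated and using the compose service names from the config.toml comment above; the JSON-RPC framing follows the MCP stdio convention, and the client name/version are placeholders:

    # Bring up the containers the wrapper depends on (service names from config.toml above)
    cd ~/.llmx
    docker compose up -d crawl4ai-db crawl4ai-rest

    # Send one MCP initialize request over stdio; a JSON-RPC result on stdout
    # indicates the server started and negotiated the protocol.
    printf '%s\n' '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{},"clientInfo":{"name":"smoke-test","version":"0.0.1"}}}' \
      | ./run-crawl4ai-mcp.sh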