fix: switch MCP server to stdio transport for llmx compatibility
- Add wrapper script run-crawl4ai-mcp.sh for stdio transport
- Update config.toml to use stdio instead of SSE
- llmx uses Streamable HTTP transport, which is incompatible with SSE
- DB and REST still run via docker compose; MCP runs on-demand

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
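For orientation, a minimal sketch of the intended workflow after this change, assembled from the comment added to config.toml and the new wrapper below (service names are taken from the diff; adjust them if your compose file differs):

# Long-running backing services stay under docker compose; the MCP
# container itself is NOT started here.
cd ~/.llmx
docker compose up -d crawl4ai-db crawl4ai-rest

# llmx then spawns the MCP server on demand over stdio by executing the
# wrapper configured as `command` in config.toml:
#   /home/valknar/.llmx/run-crawl4ai-mcp.sh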
.gitignore (vendored) | 1

@@ -5,6 +5,7 @@
 !.gitignore
 !config.toml
 !compose.yaml
+!*.sh
 
 # Allow servers directory and its contents
 !servers/
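This .gitignore apparently ignores everything by default and whitelists entries with `!` negation patterns, so the new `!*.sh` line is what allows the wrapper script to be committed at all. An optional check, not part of the commit, that the wrapper is no longer ignored:

# Exits 1 and prints nothing once !*.sh is in place (file not ignored);
# before the change it would print the matching ignore rule.
cd ~/.llmx && git check-ignore -v run-crawl4ai-mcp.sh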
config.toml | 10

@@ -20,6 +20,9 @@ trust_level = "trusted"
 [projects."/home/valknar/bin"]
 trust_level = "trusted"
 
+[projects."/home/valknar/Projects/pivoine.art"]
+trust_level = "trusted"
+
 # ==============================================================================
 # MCP SERVERS CONFIGURATION (10 servers)
 # Last updated: 2025-11-24
@@ -137,8 +140,9 @@ startup_timeout_sec = 20
 [mcp_servers.crawl4ai_rag]
 # Web crawling + RAG with vector search (local Docker stack)
 # Uses BGE embeddings via remote LiteLLM, local PostgreSQL + pgvector
-# Start with: cd ~/.llmx && docker compose up -d
-url = "http://localhost:8051/sse"
+# Start DB first: cd ~/.llmx && docker compose up -d crawl4ai-db crawl4ai-rest
+command = "/home/valknar/.llmx/run-crawl4ai-mcp.sh"
+args = []
 enabled = true
-startup_timeout_sec = 30
+startup_timeout_sec = 60
 tool_timeout_sec = 120
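The key change above swaps the SSE endpoint (`url`) for a stdio launch (`command` + `args`) and doubles the startup timeout to cover the docker run cold start. Two optional sanity checks, not part of the commit, using the paths and service names exactly as they appear in the diff:

# The wrapper referenced by `command` must exist and be executable.
test -x /home/valknar/.llmx/run-crawl4ai-mcp.sh && echo "wrapper OK"

# The backing services named in the updated comment should already be up.
cd ~/.llmx && docker compose ps crawl4ai-db crawl4ai-rest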
run-crawl4ai-mcp.sh (new executable file) | 16

@@ -0,0 +1,16 @@
+#!/bin/bash
+# Wrapper script to run crawl4ai-rag MCP server with environment variables
+set -a
+source "$(dirname "$0")/.env"
+set +a
+
+exec docker run -i --rm \
+  --network=llmx_default \
+  -e TRANSPORT=stdio \
+  -e SUPABASE_URL=http://crawl4ai-rest:3000 \
+  -e SUPABASE_SERVICE_KEY="$SUPABASE_SERVICE_KEY" \
+  -e OPENAI_API_KEY="$LITELLM_API_KEY" \
+  -e EMBEDDING_API_BASE=https://llm.ai.pivoine.art/v1 \
+  -e EMBEDDING_MODEL=bge-large-en-v1.5 \
+  -e EMBEDDING_DIMENSION=1024 \
+  llmx-crawl4ai-rag
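To exercise the wrapper outside llmx, a hypothetical smoke test, assuming the container speaks standard newline-delimited MCP JSON-RPC over stdio (the protocolVersion and clientInfo values below are placeholders, not from the commit):

# Pipe an MCP initialize request into the wrapper; a healthy server
# should answer with a JSON-RPC "result" object on stdout.
echo '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"protocolVersion":"2024-11-05","capabilities":{},"clientInfo":{"name":"smoke-test","version":"0.0.0"}}}' \
  | /home/valknar/.llmx/run-crawl4ai-mcp.sh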