diff --git a/.gitignore b/.gitignore
index a4d95c7..ec1f311 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@
 !.gitignore
 !config.toml
 !compose.yaml
+!*.sh
 
 # Allow servers directory and its contents
 !servers/
diff --git a/config.toml b/config.toml
index 778df68..0d59e2e 100644
--- a/config.toml
+++ b/config.toml
@@ -20,6 +20,9 @@ trust_level = "trusted"
 [projects."/home/valknar/bin"]
 trust_level = "trusted"
 
+[projects."/home/valknar/Projects/pivoine.art"]
+trust_level = "trusted"
+
 # ==============================================================================
 # MCP SERVERS CONFIGURATION (10 servers)
 # Last updated: 2025-11-24
@@ -137,8 +140,9 @@ startup_timeout_sec = 20
 [mcp_servers.crawl4ai_rag]
 # Web crawling + RAG with vector search (local Docker stack)
 # Uses BGE embeddings via remote LiteLLM, local PostgreSQL + pgvector
-# Start with: cd ~/.llmx && docker compose up -d
-url = "http://localhost:8051/sse"
+# Start DB first: cd ~/.llmx && docker compose up -d crawl4ai-db crawl4ai-rest
+command = "/home/valknar/.llmx/run-crawl4ai-mcp.sh"
+args = []
 enabled = true
-startup_timeout_sec = 30
+startup_timeout_sec = 60
 tool_timeout_sec = 120
diff --git a/run-crawl4ai-mcp.sh b/run-crawl4ai-mcp.sh
new file mode 100755
index 0000000..06ab51b
--- /dev/null
+++ b/run-crawl4ai-mcp.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Wrapper script to run crawl4ai-rag MCP server with environment variables
+set -a
+source "$(dirname "$0")/.env"
+set +a
+
+exec docker run -i --rm \
+  --network=llmx_default \
+  -e TRANSPORT=stdio \
+  -e SUPABASE_URL=http://crawl4ai-rest:3000 \
+  -e SUPABASE_SERVICE_KEY="$SUPABASE_SERVICE_KEY" \
+  -e OPENAI_API_KEY="$LITELLM_API_KEY" \
+  -e EMBEDDING_API_BASE=https://llm.ai.pivoine.art/v1 \
+  -e EMBEDDING_MODEL=bge-large-en-v1.5 \
+  -e EMBEDDING_DIMENSION=1024 \
+  llmx-crawl4ai-rag
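
A minimal sanity check for the new wrapper, assuming .env in ~/.llmx defines SUPABASE_SERVICE_KEY and LITELLM_API_KEY and the llmx-crawl4ai-rag image has already been built:

    cd ~/.llmx
    docker compose up -d crawl4ai-db crawl4ai-rest   # bring up the DB and REST API first, as the config comment notes
    ./run-crawl4ai-mcp.sh                            # should attach the MCP server to stdio; Ctrl-C to stop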