From 10bcbb2120f388a5d28c7f4430daf3bace1c2667 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?= Date: Tue, 25 Nov 2025 08:29:43 +0100 Subject: [PATCH] feat: add LLMX configuration with Crawl4AI RAG MCP server MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add config.toml with MCP servers configuration - Add compose.yaml for PostgreSQL+pgvector, PostgREST, and Crawl4AI RAG - Include forked mcp-crawl4ai-rag with BGE 1024-dim embedding support - Custom schema (crawled_pages_1024.sql) for BGE embeddings šŸ¤– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- .gitignore | 21 + compose.yaml | 71 + config.toml | 144 + servers/mcp-crawl4ai-rag/.dockerignore | 4 + servers/mcp-crawl4ai-rag/.env.example | 59 + servers/mcp-crawl4ai-rag/.gitattributes | 2 + servers/mcp-crawl4ai-rag/.gitignore | 7 + servers/mcp-crawl4ai-rag/Dockerfile | 21 + servers/mcp-crawl4ai-rag/LICENSE | 21 + servers/mcp-crawl4ai-rag/README.md | 455 ++++ servers/mcp-crawl4ai-rag/crawled_pages.sql | 175 ++ .../mcp-crawl4ai-rag/crawled_pages_1024.sql | 226 ++ .../ai_hallucination_detector.py | 335 +++ .../knowledge_graphs/ai_script_analyzer.py | 532 ++++ .../hallucination_reporter.py | 523 ++++ .../knowledge_graph_validator.py | 1244 +++++++++ .../knowledge_graphs/parse_repo_into_neo4j.py | 858 ++++++ .../knowledge_graphs/query_knowledge_graph.py | 400 +++ .../knowledge_graphs/test_script.py | 160 ++ servers/mcp-crawl4ai-rag/pyproject.toml | 15 + servers/mcp-crawl4ai-rag/src/crawl4ai_mcp.py | 1854 +++++++++++++ servers/mcp-crawl4ai-rag/src/utils.py | 749 ++++++ servers/mcp-crawl4ai-rag/uv.lock | 2348 +++++++++++++++++ 23 files changed, 10224 insertions(+) create mode 100644 .gitignore create mode 100644 compose.yaml create mode 100644 config.toml create mode 100644 servers/mcp-crawl4ai-rag/.dockerignore create mode 100644 servers/mcp-crawl4ai-rag/.env.example create mode 100644 
servers/mcp-crawl4ai-rag/.gitattributes create mode 100644 servers/mcp-crawl4ai-rag/.gitignore create mode 100644 servers/mcp-crawl4ai-rag/Dockerfile create mode 100644 servers/mcp-crawl4ai-rag/LICENSE create mode 100644 servers/mcp-crawl4ai-rag/README.md create mode 100644 servers/mcp-crawl4ai-rag/crawled_pages.sql create mode 100644 servers/mcp-crawl4ai-rag/crawled_pages_1024.sql create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/ai_hallucination_detector.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/ai_script_analyzer.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/hallucination_reporter.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/knowledge_graph_validator.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/parse_repo_into_neo4j.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/query_knowledge_graph.py create mode 100644 servers/mcp-crawl4ai-rag/knowledge_graphs/test_script.py create mode 100644 servers/mcp-crawl4ai-rag/pyproject.toml create mode 100644 servers/mcp-crawl4ai-rag/src/crawl4ai_mcp.py create mode 100644 servers/mcp-crawl4ai-rag/src/utils.py create mode 100644 servers/mcp-crawl4ai-rag/uv.lock diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a4d95c7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +# Ignore everything by default +* + +# Allow configuration files +!.gitignore +!config.toml +!compose.yaml + +# Allow servers directory and its contents +!servers/ +!servers/** + +# Ignore sensitive files +.env +*.log +*.tmp + +# Ignore Docker/runtime data +__pycache__/ +*.pyc +.cache/ diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 0000000..6036d4e --- /dev/null +++ b/compose.yaml @@ -0,0 +1,71 @@ +services: + # PostgreSQL with pgvector for vector storage + crawl4ai-db: + image: pgvector/pgvector:pg16 + container_name: llmx_crawl4ai_db + restart: unless-stopped + ports: + - "5433:5432" + volumes: + - 
crawl4ai_data:/var/lib/postgresql/data + - ./servers/mcp-crawl4ai-rag/crawled_pages_1024.sql:/docker-entrypoint-initdb.d/01_schema.sql:ro + environment: + POSTGRES_USER: crawl4ai + POSTGRES_PASSWORD: ${CRAWL4AI_DB_PASSWORD} + POSTGRES_DB: crawl4ai + healthcheck: + test: ["CMD-SHELL", "pg_isready -U crawl4ai"] + interval: 10s + timeout: 5s + retries: 5 + + # PostgREST - Supabase-compatible REST API + crawl4ai-rest: + image: postgrest/postgrest:v12.2.0 + container_name: llmx_crawl4ai_rest + restart: unless-stopped + ports: + - "3001:3000" + environment: + PGRST_DB_URI: postgresql://crawl4ai:${CRAWL4AI_DB_PASSWORD}@crawl4ai-db:5432/crawl4ai + PGRST_DB_SCHEMAS: public + PGRST_DB_ANON_ROLE: anon + PGRST_JWT_SECRET: ${JWT_SECRET} + PGRST_DB_EXTRA_SEARCH_PATH: public + depends_on: + crawl4ai-db: + condition: service_healthy + + # Crawl4AI RAG MCP Server + crawl4ai-rag: + build: + context: ./servers/mcp-crawl4ai-rag + container_name: llmx_crawl4ai_rag + restart: unless-stopped + ports: + - "8051:8051" + environment: + # MCP Transport + TRANSPORT: sse + HOST: 0.0.0.0 + PORT: 8051 + # Embedding config - point to remote LiteLLM + EMBEDDING_API_BASE: https://llm.ai.pivoine.art/v1 + EMBEDDING_MODEL: bge-large-en-v1.5 + EMBEDDING_DIMENSION: "1024" + OPENAI_API_KEY: ${LITELLM_API_KEY} + # Supabase-compatible config (pointing to PostgREST) + SUPABASE_URL: http://crawl4ai-rest:3000 + SUPABASE_SERVICE_KEY: ${SUPABASE_SERVICE_KEY} + # Feature flags + USE_CONTEXTUAL_EMBEDDINGS: "false" + USE_HYBRID_SEARCH: "true" + USE_AGENTIC_RAG: "false" + USE_RERANKING: "false" + USE_KNOWLEDGE_GRAPH: "false" + depends_on: + - crawl4ai-rest + +volumes: + crawl4ai_data: + name: llmx_crawl4ai_data diff --git a/config.toml b/config.toml new file mode 100644 index 0000000..3fbd5a9 --- /dev/null +++ b/config.toml @@ -0,0 +1,144 @@ +model_provider = "litellm" +# model = "anthropic/claude-sonnet-4-5-20250929" +model = "hosted_vllm/openai/qwen-2.5-7b" + +[projects."/home/valknar"] +trust_level = "trusted" + 
+[projects."/home/valknar/Projects/llmx"] +trust_level = "trusted" + +[projects."/home/valknar/Projects/docker-compose"] +trust_level = "trusted" + +[projects."/home/valknar/Projects/kit-ui"] +trust_level = "trusted" + +[projects."/home/valknar/Projects/image-ui"] +trust_level = "trusted" + +[projects."/home/valknar/bin"] +trust_level = "trusted" + +# ============================================================================== +# MCP SERVERS CONFIGURATION (13 servers) +# Last updated: 2025-11-24 +# Removed: sqlite, brave_search, filescope, in_memoria, rust_filesystem (broken/incompatible) +# ============================================================================== + +# ============================================================================== +# ESSENTIAL CORE SERVERS (3) +# ============================================================================== + +[mcp_servers.filesystem] +# TypeScript implementation - stable and battle-tested +command = "npx" +args = ["-y", "@modelcontextprotocol/server-filesystem", "/home/valknar"] +enabled = true +startup_timeout_sec = 10 + +[mcp_servers.git] +# Git operations - Python uvx (no NPM package available) +command = "uvx" +args = ["mcp-server-git"] +enabled = true +startup_timeout_sec = 10 + +[mcp_servers.playwright] +# Browser automation - most popular MCP server (826k weekly downloads) +command = "npx" +args = ["-y", "@playwright/mcp"] +enabled = true +startup_timeout_sec = 20 + +# ============================================================================== +# HIGHLY RECOMMENDED SERVERS (3) +# ============================================================================== + +[mcp_servers.duckduckgo] +# Web search - Privacy-focused, no API key required +command = "npx" +args = ["-y", "duckduckgo-mcp-server"] +enabled = true +startup_timeout_sec = 10 + +[mcp_servers.portainer] +# Docker container management (requires Portainer running) +command = "npx" +args = ["-y", "@portainer/portainer-mcp"] +enabled = false # 
Enable after configuring Portainer +startup_timeout_sec = 15 + +[mcp_servers.python_runner] +# Secure Python sandbox execution (Python uvx, requires Deno) +command = "uvx" +args = ["mcp-run-python", "stdio"] +enabled = true +startup_timeout_sec = 15 + +# ============================================================================== +# SPECIALIZED SERVERS (4) +# ============================================================================== + +[mcp_servers.commands] +# Shell command execution +command = "npx" +args = ["-y", "mcp-server-commands"] +enabled = true +startup_timeout_sec = 10 + +[mcp_servers.codemcp] +# Unified read, write, and command execution (Python uvx from GitHub) +command = "uvx" +args = ["--from", "git+https://github.com/ezyang/codemcp@prod", "codemcp"] +enabled = true +startup_timeout_sec = 60 + +[mcp_servers.anyquery] +# SQL interface to 40+ services (requires Go installation) +command = "anyquery" +args = ["mcp"] +enabled = false # Enable after: go install github.com/julien040/anyquery@latest +startup_timeout_sec = 20 + +[mcp_servers.openapi] +# Generic OpenAPI integration (Docker-based) +command = "docker" +args = ["run", "-i", "--rm", "snaggle/openapi-mcp"] +enabled = false # Configure --openapi-url for specific API +startup_timeout_sec = 20 + +# ============================================================================== +# AGGREGATORS (1) +# ============================================================================== + +[mcp_servers.pipedream] +# 2,500+ APIs with 8,000+ prebuilt tools (requires account) +url = "https://api.pipedream.com/mcp" +bearer_token_env_var = "PIPEDREAM_API_KEY" +enabled = false # Enable after creating Pipedream account +startup_timeout_sec = 20 + +# ============================================================================== +# ADDITIONAL CUSTOM SERVERS (1) +# ============================================================================== + +[mcp_servers.github_official] +# Official GitHub MCP server 
(Docker-based) +command = "docker" +args = ["run", "-i", "--rm", "-e", "GITHUB_TOKEN", "ghcr.io/github/github-mcp-server"] +enabled = false # Enable if you prefer native MCP over gh CLI +startup_timeout_sec = 20 + +# ============================================================================== +# RAG SERVERS (1) +# ============================================================================== + +[mcp_servers.crawl4ai_rag] +# Web crawling + RAG with vector search (local Docker stack) +# Uses BGE embeddings via remote LiteLLM, local PostgreSQL + pgvector +# Start with: cd ~/.llmx && docker compose up -d +url = "http://localhost:8051/sse" +enabled = true +startup_timeout_sec = 30 +tool_timeout_sec = 120 diff --git a/servers/mcp-crawl4ai-rag/.dockerignore b/servers/mcp-crawl4ai-rag/.dockerignore new file mode 100644 index 0000000..f2e7542 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/.dockerignore @@ -0,0 +1,4 @@ +crawl4ai_mcp.egg-info +__pycache__ +.venv +.env \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/.env.example b/servers/mcp-crawl4ai-rag/.env.example new file mode 100644 index 0000000..17f375d --- /dev/null +++ b/servers/mcp-crawl4ai-rag/.env.example @@ -0,0 +1,59 @@ +# The transport for the MCP server - either 'sse' or 'stdio' (defaults to sse if left empty) +TRANSPORT= + +# Host to bind to if using sse as the transport (leave empty if using stdio) +# Set this to 0.0.0.0 if using Docker, otherwise set to localhost (if using uv) +HOST= + +# Port to listen on if using sse as the transport (leave empty if using stdio) +PORT= + +# Get your OpenAI API Key by following these instructions - +# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key +# This is for the embedding model - text-embedding-3-small will be used +OPENAI_API_KEY= + +# The LLM you want to use for summaries and contextual embeddings +# Generally this is a very cheap and fast LLM like gpt-4.1-nano +MODEL_CHOICE= + +# RAG strategies - set these to "true" or 
"false" (defaults to "false") +# USE_CONTEXTUAL_EMBEDDINGS: Enhances embeddings with contextual information for better retrieval +USE_CONTEXTUAL_EMBEDDINGS=false + +# USE_HYBRID_SEARCH: Combines vector similarity search with keyword search for better results +USE_HYBRID_SEARCH=false + +# USE_AGENTIC_RAG: Enables code example extraction, storage, and specialized code search functionality +USE_AGENTIC_RAG=false + +# USE_RERANKING: Applies cross-encoder reranking to improve search result relevance +USE_RERANKING=false + +# USE_KNOWLEDGE_GRAPH: Enables AI hallucination detection and repository parsing tools using Neo4j +# If you set this to true, you must also set the Neo4j environment variables below. +USE_KNOWLEDGE_GRAPH=false + +# For the Supabase version (sample_supabase_agent.py), set your Supabase URL and Service Key. +# Get your SUPABASE_URL from the API section of your Supabase project settings - +# https://supabase.com/dashboard/project/<your-project-id>/settings/api +SUPABASE_URL= + +# Get your SUPABASE_SERVICE_KEY from the API section of your Supabase project settings - +# https://supabase.com/dashboard/project/<your-project-id>/settings/api +# On this page it is called the service_role secret. 
+SUPABASE_SERVICE_KEY= + +# Neo4j Configuration for Knowledge Graph Tools +# These are required for the AI hallucination detection and repository parsing tools +# Leave empty to disable knowledge graph functionality + +# Neo4j connection URI - use bolt://localhost:7687 for local, neo4j:// for cloud instances +# IMPORTANT: If running the MCP server through Docker, change localhost to host.docker.internal +NEO4J_URI=bolt://localhost:7687 + +# Neo4j username (usually 'neo4j' for default installations) +NEO4J_USER=neo4j + +# Neo4j password for your database instance +NEO4J_PASSWORD= \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/.gitattributes b/servers/mcp-crawl4ai-rag/.gitattributes new file mode 100644 index 0000000..dfe0770 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto diff --git a/servers/mcp-crawl4ai-rag/.gitignore b/servers/mcp-crawl4ai-rag/.gitignore new file mode 100644 index 0000000..ff95578 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/.gitignore @@ -0,0 +1,7 @@ +.env +.venv +__pycache__ +crawl4ai_mcp.egg-info +repos +.claude +test_script_hallucination* \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/Dockerfile b/servers/mcp-crawl4ai-rag/Dockerfile new file mode 100644 index 0000000..0fa9532 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +ARG PORT=8051 + +WORKDIR /app + +# Install uv +RUN pip install uv + +# Copy the MCP server files +COPY . . + +# Install packages directly to the system (no virtual environment) +# Combining commands to reduce Docker layers +RUN uv pip install --system -e . 
&& \ + crawl4ai-setup + +EXPOSE ${PORT} + +# Command to run the MCP server +CMD ["python", "src/crawl4ai_mcp.py"] diff --git a/servers/mcp-crawl4ai-rag/LICENSE b/servers/mcp-crawl4ai-rag/LICENSE new file mode 100644 index 0000000..44b90e6 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Cole Medin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/servers/mcp-crawl4ai-rag/README.md b/servers/mcp-crawl4ai-rag/README.md new file mode 100644 index 0000000..b0eaeab --- /dev/null +++ b/servers/mcp-crawl4ai-rag/README.md @@ -0,0 +1,455 @@ +

Crawl4AI RAG MCP Server

+ +

+ Web Crawling and RAG Capabilities for AI Agents and AI Coding Assistants +

+ +A powerful implementation of the [Model Context Protocol (MCP)](https://modelcontextprotocol.io) integrated with [Crawl4AI](https://crawl4ai.com) and [Supabase](https://supabase.com/) for providing AI agents and AI coding assistants with advanced web crawling and RAG capabilities. + +With this MCP server, you can scrape anything and then use that knowledge anywhere for RAG. + +The primary goal is to bring this MCP server into [Archon](https://github.com/coleam00/Archon) as I evolve it to be more of a knowledge engine for AI coding assistants to build AI agents. This first version of the Crawl4AI/RAG MCP server will be improved upon greatly soon, especially making it more configurable so you can use different embedding models and run everything locally with Ollama. + +Consider this GitHub repository a testbed, hence why I haven't been super actively addressing issues and pull requests yet. I certainly will though as I bring this into Archon V2! + +## Overview + +This MCP server provides tools that enable AI agents to crawl websites, store content in a vector database (Supabase), and perform RAG over the crawled content. It follows the best practices for building MCP servers based on the [Mem0 MCP server template](https://github.com/coleam00/mcp-mem0/) I provided on my channel previously. + +The server includes several advanced RAG strategies that can be enabled to enhance retrieval quality: +- **Contextual Embeddings** for enriched semantic understanding +- **Hybrid Search** combining vector and keyword search +- **Agentic RAG** for specialized code example extraction +- **Reranking** for improved result relevance using cross-encoder models +- **Knowledge Graph** for AI hallucination detection and repository code analysis + +See the [Configuration section](#configuration) below for details on how to enable and configure these strategies. + +## Vision + +The Crawl4AI RAG MCP server is just the beginning. Here's where we're headed: + +1. 
**Integration with Archon**: Building this system directly into [Archon](https://github.com/coleam00/Archon) to create a comprehensive knowledge engine for AI coding assistants to build better AI agents. + +2. **Multiple Embedding Models**: Expanding beyond OpenAI to support a variety of embedding models, including the ability to run everything locally with Ollama for complete control and privacy. + +3. **Advanced RAG Strategies**: Implementing sophisticated retrieval techniques like contextual retrieval, late chunking, and others to move beyond basic "naive lookups" and significantly enhance the power and precision of the RAG system, especially as it integrates with Archon. + +4. **Enhanced Chunking Strategy**: Implementing a Context 7-inspired chunking approach that focuses on examples and creates distinct, semantically meaningful sections for each chunk, improving retrieval precision. + +5. **Performance Optimization**: Increasing crawling and indexing speed to make it more realistic to "quickly" index new documentation to then leverage it within the same prompt in an AI coding assistant. + +## Features + +- **Smart URL Detection**: Automatically detects and handles different URL types (regular webpages, sitemaps, text files) +- **Recursive Crawling**: Follows internal links to discover content +- **Parallel Processing**: Efficiently crawls multiple pages simultaneously +- **Content Chunking**: Intelligently splits content by headers and size for better processing +- **Vector Search**: Performs RAG over crawled content, optionally filtering by data source for precision +- **Source Retrieval**: Retrieve sources available for filtering to guide the RAG process + +## Tools + +The server provides essential web crawling and search tools: + +### Core Tools (Always Available) + +1. **`crawl_single_page`**: Quickly crawl a single web page and store its content in the vector database +2. 
**`smart_crawl_url`**: Intelligently crawl a full website based on the type of URL provided (sitemap, llms-full.txt, or a regular webpage that needs to be crawled recursively) +3. **`get_available_sources`**: Get a list of all available sources (domains) in the database +4. **`perform_rag_query`**: Search for relevant content using semantic search with optional source filtering + +### Conditional Tools + +5. **`search_code_examples`** (requires `USE_AGENTIC_RAG=true`): Search specifically for code examples and their summaries from crawled documentation. This tool provides targeted code snippet retrieval for AI coding assistants. + +### Knowledge Graph Tools (requires `USE_KNOWLEDGE_GRAPH=true`, see below) + +6. **`parse_github_repository`**: Parse a GitHub repository into a Neo4j knowledge graph, extracting classes, methods, functions, and their relationships for hallucination detection +7. **`check_ai_script_hallucinations`**: Analyze Python scripts for AI hallucinations by validating imports, method calls, and class usage against the knowledge graph +8. **`query_knowledge_graph`**: Explore and query the Neo4j knowledge graph with commands like `repos`, `classes`, `methods`, and custom Cypher queries + +## Prerequisites + +- [Docker/Docker Desktop](https://www.docker.com/products/docker-desktop/) if running the MCP server as a container (recommended) +- [Python 3.12+](https://www.python.org/downloads/) if running the MCP server directly through uv +- [Supabase](https://supabase.com/) (database for RAG) +- [OpenAI API key](https://platform.openai.com/api-keys) (for generating embeddings) +- [Neo4j](https://neo4j.com/) (optional, for knowledge graph functionality) - see [Knowledge Graph Setup](#knowledge-graph-setup) section + +## Installation + +### Using Docker (Recommended) + +1. Clone this repository: + ```bash + git clone https://github.com/coleam00/mcp-crawl4ai-rag.git + cd mcp-crawl4ai-rag + ``` + +2. 
Build the Docker image: + ```bash + docker build -t mcp/crawl4ai-rag --build-arg PORT=8051 . + ``` + +3. Create a `.env` file based on the configuration section below + +### Using uv directly (no Docker) + +1. Clone this repository: + ```bash + git clone https://github.com/coleam00/mcp-crawl4ai-rag.git + cd mcp-crawl4ai-rag + ``` + +2. Install uv if you don't have it: + ```bash + pip install uv + ``` + +3. Create and activate a virtual environment: + ```bash + uv venv + .venv\Scripts\activate + # on Mac/Linux: source .venv/bin/activate + ``` + +4. Install dependencies: + ```bash + uv pip install -e . + crawl4ai-setup + ``` + +5. Create a `.env` file based on the configuration section below + +## Database Setup + +Before running the server, you need to set up the database with the pgvector extension: + +1. Go to the SQL Editor in your Supabase dashboard (create a new project first if necessary) + +2. Create a new query and paste the contents of `crawled_pages.sql` + +3. Run the query to create the necessary tables and functions + +## Knowledge Graph Setup (Optional) + +To enable AI hallucination detection and repository analysis features, you need to set up Neo4j. + +Also, the knowledge graph implementation isn't fully compatible with Docker yet, so I would recommend right now running directly through uv if you want to use the hallucination detection within the MCP server! + +For installing Neo4j: + +### Local AI Package (Recommended) + +The easiest way to get Neo4j running locally is with the [Local AI Package](https://github.com/coleam00/local-ai-packaged) - a curated collection of local AI services including Neo4j: + +1. **Clone the Local AI Package**: + ```bash + git clone https://github.com/coleam00/local-ai-packaged.git + cd local-ai-packaged + ``` + +2. **Start Neo4j**: + Follow the instructions in the Local AI Package repository to start Neo4j with Docker Compose + +3. 
**Default connection details**: + - URI: `bolt://localhost:7687` + - Username: `neo4j` + - Password: Check the Local AI Package documentation for the default password + +### Manual Neo4j Installation + +Alternatively, install Neo4j directly: + +1. **Install Neo4j Desktop**: Download from [neo4j.com/download](https://neo4j.com/download/) + +2. **Create a new database**: + - Open Neo4j Desktop + - Create a new project and database + - Set a password for the `neo4j` user + - Start the database + +3. **Note your connection details**: + - URI: `bolt://localhost:7687` (default) + - Username: `neo4j` (default) + - Password: Whatever you set during creation + +## Configuration + +Create a `.env` file in the project root with the following variables: + +``` +# MCP Server Configuration +HOST=0.0.0.0 +PORT=8051 +TRANSPORT=sse + +# OpenAI API Configuration +OPENAI_API_KEY=your_openai_api_key + +# LLM for summaries and contextual embeddings +MODEL_CHOICE=gpt-4.1-nano + +# RAG Strategies (set to "true" or "false", default to "false") +USE_CONTEXTUAL_EMBEDDINGS=false +USE_HYBRID_SEARCH=false +USE_AGENTIC_RAG=false +USE_RERANKING=false +USE_KNOWLEDGE_GRAPH=false + +# Supabase Configuration +SUPABASE_URL=your_supabase_project_url +SUPABASE_SERVICE_KEY=your_supabase_service_key + +# Neo4j Configuration (required for knowledge graph functionality) +NEO4J_URI=bolt://localhost:7687 +NEO4J_USER=neo4j +NEO4J_PASSWORD=your_neo4j_password +``` + +### RAG Strategy Options + +The Crawl4AI RAG MCP server supports four powerful RAG strategies that can be enabled independently: + +#### 1. **USE_CONTEXTUAL_EMBEDDINGS** +When enabled, this strategy enhances each chunk's embedding with additional context from the entire document. The system passes both the full document and the specific chunk to an LLM (configured via `MODEL_CHOICE`) to generate enriched context that gets embedded alongside the chunk content. 
+ +- **When to use**: Enable this when you need high-precision retrieval where context matters, such as technical documentation where terms might have different meanings in different sections. +- **Trade-offs**: Slower indexing due to LLM calls for each chunk, but significantly better retrieval accuracy. +- **Cost**: Additional LLM API calls during indexing. + +#### 2. **USE_HYBRID_SEARCH** +Combines traditional keyword search with semantic vector search to provide more comprehensive results. The system performs both searches in parallel and intelligently merges results, prioritizing documents that appear in both result sets. + +- **When to use**: Enable this when users might search using specific technical terms, function names, or when exact keyword matches are important alongside semantic understanding. +- **Trade-offs**: Slightly slower search queries but more robust results, especially for technical content. +- **Cost**: No additional API costs, just computational overhead. + +#### 3. **USE_AGENTIC_RAG** +Enables specialized code example extraction and storage. When crawling documentation, the system identifies code blocks (≄300 characters), extracts them with surrounding context, generates summaries, and stores them in a separate vector database table specifically designed for code search. + +- **When to use**: Essential for AI coding assistants that need to find specific code examples, implementation patterns, or usage examples from documentation. +- **Trade-offs**: Significantly slower crawling due to code extraction and summarization, requires more storage space. +- **Cost**: Additional LLM API calls for summarizing each code example. +- **Benefits**: Provides a dedicated `search_code_examples` tool that AI agents can use to find specific code implementations. + +#### 4. **USE_RERANKING** +Applies cross-encoder reranking to search results after initial retrieval. 
Uses a lightweight cross-encoder model (`cross-encoder/ms-marco-MiniLM-L-6-v2`) to score each result against the original query, then reorders results by relevance. + +- **When to use**: Enable this when search precision is critical and you need the most relevant results at the top. Particularly useful for complex queries where semantic similarity alone might not capture query intent. +- **Trade-offs**: Adds ~100-200ms to search queries depending on result count, but significantly improves result ordering. +- **Cost**: No additional API costs - uses a local model that runs on CPU. +- **Benefits**: Better result relevance, especially for complex queries. Works with both regular RAG search and code example search. + +#### 5. **USE_KNOWLEDGE_GRAPH** +Enables AI hallucination detection and repository analysis using Neo4j knowledge graphs. When enabled, the system can parse GitHub repositories into a graph database and validate AI-generated code against real repository structures. (NOT fully compatible with Docker yet, I'd recommend running through uv) + +- **When to use**: Enable this for AI coding assistants that need to validate generated code against real implementations, or when you want to detect when AI models hallucinate non-existent methods, classes, or incorrect usage patterns. +- **Trade-offs**: Requires Neo4j setup and additional dependencies. Repository parsing can be slow for large codebases, and validation requires repositories to be pre-indexed. +- **Cost**: No additional API costs for validation, but requires Neo4j infrastructure (can use free local installation or cloud AuraDB). +- **Benefits**: Provides three powerful tools: `parse_github_repository` for indexing codebases, `check_ai_script_hallucinations` for validating AI-generated code, and `query_knowledge_graph` for exploring indexed repositories. 
+ +You can now tell the AI coding assistant to add a Python GitHub repository to the knowledge graph like: + +"Add https://github.com/pydantic/pydantic-ai.git to the knowledge graph" + +Make sure the repo URL ends with .git. + +You can also have the AI coding assistant check for hallucinations with scripts it just created, or you can manually run the command: + +``` +python knowledge_graphs/ai_hallucination_detector.py [full path to your script to analyze] +``` + +### Recommended Configurations + +**For general documentation RAG:** +``` +USE_CONTEXTUAL_EMBEDDINGS=false +USE_HYBRID_SEARCH=true +USE_AGENTIC_RAG=false +USE_RERANKING=true +``` + +**For AI coding assistant with code examples:** +``` +USE_CONTEXTUAL_EMBEDDINGS=true +USE_HYBRID_SEARCH=true +USE_AGENTIC_RAG=true +USE_RERANKING=true +USE_KNOWLEDGE_GRAPH=false +``` + +**For AI coding assistant with hallucination detection:** +``` +USE_CONTEXTUAL_EMBEDDINGS=true +USE_HYBRID_SEARCH=true +USE_AGENTIC_RAG=true +USE_RERANKING=true +USE_KNOWLEDGE_GRAPH=true +``` + +**For fast, basic RAG:** +``` +USE_CONTEXTUAL_EMBEDDINGS=false +USE_HYBRID_SEARCH=true +USE_AGENTIC_RAG=false +USE_RERANKING=false +USE_KNOWLEDGE_GRAPH=false +``` + +## Running the Server + +### Using Docker + +```bash +docker run --env-file .env -p 8051:8051 mcp/crawl4ai-rag +``` + +### Using Python + +```bash +uv run src/crawl4ai_mcp.py +``` + +The server will start and listen on the configured host and port. 
+ +## Integration with MCP Clients + +### SSE Configuration + +Once you have the server running with SSE transport, you can connect to it using this configuration: + +```json +{ + "mcpServers": { + "crawl4ai-rag": { + "transport": "sse", + "url": "http://localhost:8051/sse" + } + } +} +``` + +> **Note for Windsurf users**: Use `serverUrl` instead of `url` in your configuration: +> ```json +> { +> "mcpServers": { +> "crawl4ai-rag": { +> "transport": "sse", +> "serverUrl": "http://localhost:8051/sse" +> } +> } +> } +> ``` +> +> **Note for Docker users**: Use `host.docker.internal` instead of `localhost` if your client is running in a different container. This will apply if you are using this MCP server within n8n! + +> **Note for Claude Code users**: +``` +claude mcp add-json crawl4ai-rag '{"type":"http","url":"http://localhost:8051/sse"}' --scope user +``` + +### Stdio Configuration + +Add this server to your MCP configuration for Claude Desktop, Windsurf, or any other MCP client: + +```json +{ + "mcpServers": { + "crawl4ai-rag": { + "command": "python", + "args": ["path/to/crawl4ai-mcp/src/crawl4ai_mcp.py"], + "env": { + "TRANSPORT": "stdio", + "OPENAI_API_KEY": "your_openai_api_key", + "SUPABASE_URL": "your_supabase_url", + "SUPABASE_SERVICE_KEY": "your_supabase_service_key", + "USE_KNOWLEDGE_GRAPH": "false", + "NEO4J_URI": "bolt://localhost:7687", + "NEO4J_USER": "neo4j", + "NEO4J_PASSWORD": "your_neo4j_password" + } + } + } +} +``` + +### Docker with Stdio Configuration + +```json +{ + "mcpServers": { + "crawl4ai-rag": { + "command": "docker", + "args": ["run", "--rm", "-i", + "-e", "TRANSPORT", + "-e", "OPENAI_API_KEY", + "-e", "SUPABASE_URL", + "-e", "SUPABASE_SERVICE_KEY", + "-e", "USE_KNOWLEDGE_GRAPH", + "-e", "NEO4J_URI", + "-e", "NEO4J_USER", + "-e", "NEO4J_PASSWORD", + "mcp/crawl4ai"], + "env": { + "TRANSPORT": "stdio", + "OPENAI_API_KEY": "your_openai_api_key", + "SUPABASE_URL": "your_supabase_url", + "SUPABASE_SERVICE_KEY": "your_supabase_service_key", 
+ "USE_KNOWLEDGE_GRAPH": "false", + "NEO4J_URI": "bolt://localhost:7687", + "NEO4J_USER": "neo4j", + "NEO4J_PASSWORD": "your_neo4j_password" + } + } + } +} +``` + +## Knowledge Graph Architecture + +The knowledge graph system stores repository code structure in Neo4j with the following components: + +### Core Components (`knowledge_graphs/` folder): + +- **`parse_repo_into_neo4j.py`**: Clones and analyzes GitHub repositories, extracting Python classes, methods, functions, and imports into Neo4j nodes and relationships +- **`ai_script_analyzer.py`**: Parses Python scripts using AST to extract imports, class instantiations, method calls, and function usage +- **`knowledge_graph_validator.py`**: Validates AI-generated code against the knowledge graph to detect hallucinations (non-existent methods, incorrect parameters, etc.) +- **`hallucination_reporter.py`**: Generates comprehensive reports about detected hallucinations with confidence scores and recommendations +- **`query_knowledge_graph.py`**: Interactive CLI tool for exploring the knowledge graph (functionality now integrated into MCP tools) + +### Knowledge Graph Schema: + +The Neo4j database stores code structure as: + +**Nodes:** +- `Repository`: GitHub repositories +- `File`: Python files within repositories +- `Class`: Python classes with methods and attributes +- `Method`: Class methods with parameter information +- `Function`: Standalone functions +- `Attribute`: Class attributes + +**Relationships:** +- `Repository` -[:CONTAINS]-> `File` +- `File` -[:DEFINES]-> `Class` +- `File` -[:DEFINES]-> `Function` +- `Class` -[:HAS_METHOD]-> `Method` +- `Class` -[:HAS_ATTRIBUTE]-> `Attribute` + +### Workflow: + +1. **Repository Parsing**: Use `parse_github_repository` tool to clone and analyze open-source repositories +2. **Code Validation**: Use `check_ai_script_hallucinations` tool to validate AI-generated Python scripts +3. 
**Knowledge Exploration**: Use `query_knowledge_graph` tool to explore available repositories, classes, and methods + +## Building Your Own Server + +This implementation provides a foundation for building more complex MCP servers with web crawling capabilities. To build your own: + +1. Add your own tools by creating methods with the `@mcp.tool()` decorator +2. Create your own lifespan function to add your own dependencies +3. Modify the `utils.py` file for any helper functions you need +4. Extend the crawling capabilities by adding more specialized crawlers diff --git a/servers/mcp-crawl4ai-rag/crawled_pages.sql b/servers/mcp-crawl4ai-rag/crawled_pages.sql new file mode 100644 index 0000000..343cdb2 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/crawled_pages.sql @@ -0,0 +1,175 @@ +-- Enable the pgvector extension +create extension if not exists vector; + +-- Drop tables if they exist (to allow rerunning the script) +drop table if exists crawled_pages; +drop table if exists code_examples; +drop table if exists sources; + +-- Create the sources table +create table sources ( + source_id text primary key, + summary text, + total_word_count integer default 0, + created_at timestamp with time zone default timezone('utc'::text, now()) not null, + updated_at timestamp with time zone default timezone('utc'::text, now()) not null +); + +-- Create the documentation chunks table +create table crawled_pages ( + id bigserial primary key, + url varchar not null, + chunk_number integer not null, + content text not null, + metadata jsonb not null default '{}'::jsonb, + source_id text not null, + embedding vector(1536), -- OpenAI embeddings are 1536 dimensions + created_at timestamp with time zone default timezone('utc'::text, now()) not null, + + -- Add a unique constraint to prevent duplicate chunks for the same URL + unique(url, chunk_number), + + -- Add foreign key constraint to sources table + foreign key (source_id) references sources(source_id) +); + +-- Create an index for 
better vector similarity search performance +create index on crawled_pages using ivfflat (embedding vector_cosine_ops); + +-- Create an index on metadata for faster filtering +create index idx_crawled_pages_metadata on crawled_pages using gin (metadata); + +-- Create an index on source_id for faster filtering +CREATE INDEX idx_crawled_pages_source_id ON crawled_pages (source_id); + +-- Create a function to search for documentation chunks +create or replace function match_crawled_pages ( + query_embedding vector(1536), + match_count int default 10, + filter jsonb DEFAULT '{}'::jsonb, + source_filter text DEFAULT NULL +) returns table ( + id bigint, + url varchar, + chunk_number integer, + content text, + metadata jsonb, + source_id text, + similarity float +) +language plpgsql +as $$ +#variable_conflict use_column +begin + return query + select + id, + url, + chunk_number, + content, + metadata, + source_id, + 1 - (crawled_pages.embedding <=> query_embedding) as similarity + from crawled_pages + where metadata @> filter + AND (source_filter IS NULL OR source_id = source_filter) + order by crawled_pages.embedding <=> query_embedding + limit match_count; +end; +$$; + +-- Enable RLS on the crawled_pages table +alter table crawled_pages enable row level security; + +-- Create a policy that allows anyone to read crawled_pages +create policy "Allow public read access to crawled_pages" + on crawled_pages + for select + to public + using (true); + +-- Enable RLS on the sources table +alter table sources enable row level security; + +-- Create a policy that allows anyone to read sources +create policy "Allow public read access to sources" + on sources + for select + to public + using (true); + +-- Create the code_examples table +create table code_examples ( + id bigserial primary key, + url varchar not null, + chunk_number integer not null, + content text not null, -- The code example content + summary text not null, -- Summary of the code example + metadata jsonb not null 
default '{}'::jsonb, + source_id text not null, + embedding vector(1536), -- OpenAI embeddings are 1536 dimensions + created_at timestamp with time zone default timezone('utc'::text, now()) not null, + + -- Add a unique constraint to prevent duplicate chunks for the same URL + unique(url, chunk_number), + + -- Add foreign key constraint to sources table + foreign key (source_id) references sources(source_id) +); + +-- Create an index for better vector similarity search performance +create index on code_examples using ivfflat (embedding vector_cosine_ops); + +-- Create an index on metadata for faster filtering +create index idx_code_examples_metadata on code_examples using gin (metadata); + +-- Create an index on source_id for faster filtering +CREATE INDEX idx_code_examples_source_id ON code_examples (source_id); + +-- Create a function to search for code examples +create or replace function match_code_examples ( + query_embedding vector(1536), + match_count int default 10, + filter jsonb DEFAULT '{}'::jsonb, + source_filter text DEFAULT NULL +) returns table ( + id bigint, + url varchar, + chunk_number integer, + content text, + summary text, + metadata jsonb, + source_id text, + similarity float +) +language plpgsql +as $$ +#variable_conflict use_column +begin + return query + select + id, + url, + chunk_number, + content, + summary, + metadata, + source_id, + 1 - (code_examples.embedding <=> query_embedding) as similarity + from code_examples + where metadata @> filter + AND (source_filter IS NULL OR source_id = source_filter) + order by code_examples.embedding <=> query_embedding + limit match_count; +end; +$$; + +-- Enable RLS on the code_examples table +alter table code_examples enable row level security; + +-- Create a policy that allows anyone to read code_examples +create policy "Allow public read access to code_examples" + on code_examples + for select + to public + using (true); \ No newline at end of file diff --git 
-- Schema for Crawl4AI RAG using BGE embeddings (1024 dimensions).
-- Derived from the original 1536-dim OpenAI schema; adds PostgREST roles,
-- service_role write policies, and ON DELETE CASCADE on source references.

-- pgvector provides the vector type and similarity operators.
CREATE EXTENSION IF NOT EXISTS vector;

-- Supabase-compatible roles for PostgREST. Created idempotently so the
-- script can be re-run against an existing database.
DO $$
BEGIN
    IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'anon') THEN
        CREATE ROLE anon NOLOGIN;
    END IF;
    IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'authenticated') THEN
        CREATE ROLE authenticated NOLOGIN;
    END IF;
    IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'service_role') THEN
        CREATE ROLE service_role NOLOGIN BYPASSRLS;
    END IF;
END
$$;

-- All three roles may resolve objects in the public schema.
GRANT USAGE ON SCHEMA public TO anon, authenticated, service_role;

-- Drop dependent tables first so the script is re-runnable.
DROP TABLE IF EXISTS crawled_pages CASCADE;
DROP TABLE IF EXISTS code_examples CASCADE;
DROP TABLE IF EXISTS sources CASCADE;

-- One row per crawled source (site/domain); referenced by both chunk tables.
CREATE TABLE sources (
    source_id text PRIMARY KEY,
    summary text,
    total_word_count integer DEFAULT 0,
    created_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL,
    updated_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL
);

-- Documentation chunks with their BGE embeddings.
CREATE TABLE crawled_pages (
    id bigserial PRIMARY KEY,
    url varchar NOT NULL,
    chunk_number integer NOT NULL,
    content text NOT NULL,
    metadata jsonb NOT NULL DEFAULT '{}'::jsonb,
    source_id text NOT NULL,
    embedding vector(1024),  -- BGE embeddings are 1024-dimensional
    created_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL,

    -- A given URL may contribute each chunk index only once.
    UNIQUE (url, chunk_number),

    -- Removing a source removes its chunks as well.
    FOREIGN KEY (source_id) REFERENCES sources(source_id) ON DELETE CASCADE
);

-- Approximate nearest-neighbour index for cosine similarity search.
CREATE INDEX ON crawled_pages USING ivfflat (embedding vector_cosine_ops);

-- GIN index so jsonb containment filters on metadata stay fast.
CREATE INDEX idx_crawled_pages_metadata ON crawled_pages USING gin (metadata);

-- B-tree index for per-source filtering.
CREATE INDEX idx_crawled_pages_source_id ON crawled_pages (source_id);

-- RPC: return the match_count chunks most similar to query_embedding,
-- optionally restricted by a jsonb metadata filter and/or a single source.
CREATE OR REPLACE FUNCTION match_crawled_pages (
    query_embedding vector(1024),
    match_count int DEFAULT 10,
    filter jsonb DEFAULT '{}'::jsonb,
    source_filter text DEFAULT NULL
) RETURNS TABLE (
    id bigint,
    url varchar,
    chunk_number integer,
    content text,
    metadata jsonb,
    source_id text,
    similarity float
)
LANGUAGE plpgsql
AS $$
#variable_conflict use_column
BEGIN
    RETURN QUERY
    SELECT
        id,
        url,
        chunk_number,
        content,
        metadata,
        source_id,
        1 - (crawled_pages.embedding <=> query_embedding) AS similarity
    FROM crawled_pages
    WHERE metadata @> filter
      AND (source_filter IS NULL OR source_id = source_filter)
    ORDER BY crawled_pages.embedding <=> query_embedding
    LIMIT match_count;
END;
$$;

-- Row-level security: readers see everything, only service_role writes.
ALTER TABLE crawled_pages ENABLE ROW LEVEL SECURITY;

CREATE POLICY "Allow public read access to crawled_pages"
    ON crawled_pages
    FOR SELECT
    TO anon, authenticated
    USING (true);

CREATE POLICY "Allow service_role full access to crawled_pages"
    ON crawled_pages
    FOR ALL
    TO service_role
    USING (true)
    WITH CHECK (true);

ALTER TABLE sources ENABLE ROW LEVEL SECURITY;

CREATE POLICY "Allow public read access to sources"
    ON sources
    FOR SELECT
    TO anon, authenticated
    USING (true);

CREATE POLICY "Allow service_role full access to sources"
    ON sources
    FOR ALL
    TO service_role
    USING (true)
    WITH CHECK (true);

-- Extracted code examples with a generated summary, embedded like pages.
CREATE TABLE code_examples (
    id bigserial PRIMARY KEY,
    url varchar NOT NULL,
    chunk_number integer NOT NULL,
    content text NOT NULL,   -- the code example itself
    summary text NOT NULL,   -- natural-language summary of the example
    metadata jsonb NOT NULL DEFAULT '{}'::jsonb,
    source_id text NOT NULL,
    embedding vector(1024),  -- BGE embeddings are 1024-dimensional
    created_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL,

    -- A given URL may contribute each chunk index only once.
    UNIQUE (url, chunk_number),

    -- Removing a source removes its code examples as well.
    FOREIGN KEY (source_id) REFERENCES sources(source_id) ON DELETE CASCADE
);

-- Approximate nearest-neighbour index for cosine similarity search.
CREATE INDEX ON code_examples USING ivfflat (embedding vector_cosine_ops);

-- GIN index so jsonb containment filters on metadata stay fast.
CREATE INDEX idx_code_examples_metadata ON code_examples USING gin (metadata);

-- B-tree index for per-source filtering.
CREATE INDEX idx_code_examples_source_id ON code_examples (source_id);

-- RPC: same contract as match_crawled_pages, plus the summary column.
CREATE OR REPLACE FUNCTION match_code_examples (
    query_embedding vector(1024),
    match_count int DEFAULT 10,
    filter jsonb DEFAULT '{}'::jsonb,
    source_filter text DEFAULT NULL
) RETURNS TABLE (
    id bigint,
    url varchar,
    chunk_number integer,
    content text,
    summary text,
    metadata jsonb,
    source_id text,
    similarity float
)
LANGUAGE plpgsql
AS $$
#variable_conflict use_column
BEGIN
    RETURN QUERY
    SELECT
        id,
        url,
        chunk_number,
        content,
        summary,
        metadata,
        source_id,
        1 - (code_examples.embedding <=> query_embedding) AS similarity
    FROM code_examples
    WHERE metadata @> filter
      AND (source_filter IS NULL OR source_id = source_filter)
    ORDER BY code_examples.embedding <=> query_embedding
    LIMIT match_count;
END;
$$;

ALTER TABLE code_examples ENABLE ROW LEVEL SECURITY;

CREATE POLICY "Allow public read access to code_examples"
    ON code_examples
    FOR SELECT
    TO anon, authenticated
    USING (true);

CREATE POLICY "Allow service_role full access to code_examples"
    ON code_examples
    FOR ALL
    TO service_role
    USING (true)
    WITH CHECK (true);

-- Table-level grants (RLS still applies to anon/authenticated reads).
GRANT SELECT ON sources, crawled_pages, code_examples TO anon, authenticated;
GRANT ALL ON sources, crawled_pages, code_examples TO service_role;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO anon, authenticated, service_role;

-- Allow the RPC search functions to be called via PostgREST.
GRANT EXECUTE ON FUNCTION match_crawled_pages TO anon, authenticated, service_role;
GRANT EXECUTE ON FUNCTION match_code_examples TO anon, authenticated, service_role;
"""
AI Hallucination Detector

Main orchestrator for detecting AI coding assistant hallucinations in Python
scripts. Combines AST analysis, knowledge graph validation, and comprehensive
reporting.
"""

import asyncio
import argparse
import logging
import os
import sys
from pathlib import Path
from typing import Optional, List

from dotenv import load_dotenv

from ai_script_analyzer import AIScriptAnalyzer, analyze_ai_script
from knowledge_graph_validator import KnowledgeGraphValidator
from hallucination_reporter import HallucinationReporter

# Configure logging; INFO is the baseline, --verbose raises it to DEBUG.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
logger = logging.getLogger(__name__)


class AIHallucinationDetector:
    """Main detector class that orchestrates the entire process:

    AST analysis (AIScriptAnalyzer) -> knowledge-graph validation
    (KnowledgeGraphValidator) -> report generation (HallucinationReporter).
    """

    def __init__(self, neo4j_uri: str, neo4j_user: str, neo4j_password: str):
        self.validator = KnowledgeGraphValidator(neo4j_uri, neo4j_user, neo4j_password)
        self.reporter = HallucinationReporter()
        self.analyzer = AIScriptAnalyzer()

    async def initialize(self) -> None:
        """Initialize connections and components (opens the Neo4j driver)."""
        await self.validator.initialize()
        logger.info("AI Hallucination Detector initialized successfully")

    async def close(self) -> None:
        """Close the Neo4j connection held by the validator."""
        await self.validator.close()

    async def detect_hallucinations(self, script_path: str,
                                    output_dir: Optional[str] = None,
                                    save_json: bool = True,
                                    save_markdown: bool = True,
                                    print_summary: bool = True) -> dict:
        """
        Main detection function that analyzes a script and generates reports.

        Args:
            script_path: Path to the AI-generated Python script
            output_dir: Directory to save reports (defaults to script directory)
            save_json: Whether to save JSON report
            save_markdown: Whether to save Markdown report
            print_summary: Whether to print summary to console

        Returns:
            Complete validation report as dictionary

        Raises:
            FileNotFoundError: If script_path does not exist.
            ValueError: If script_path is not a .py file.
        """
        logger.info(f"Starting hallucination detection for: {script_path}")

        # Validate input before doing any expensive work.
        if not os.path.exists(script_path):
            raise FileNotFoundError(f"Script not found: {script_path}")

        if not script_path.endswith('.py'):
            raise ValueError("Only Python (.py) files are supported")

        # Reports default to living next to the analyzed script.
        if output_dir is None:
            output_dir = str(Path(script_path).parent)

        os.makedirs(output_dir, exist_ok=True)

        try:
            # Step 1: Analyze the script using AST
            logger.info("Step 1: Analyzing script structure...")
            analysis_result = self.analyzer.analyze_script(script_path)

            # Analysis errors are non-fatal; validation proceeds on what parsed.
            if analysis_result.errors:
                logger.warning(f"Analysis warnings: {analysis_result.errors}")

            logger.info(f"Found: {len(analysis_result.imports)} imports, "
                        f"{len(analysis_result.class_instantiations)} class instantiations, "
                        f"{len(analysis_result.method_calls)} method calls, "
                        f"{len(analysis_result.function_calls)} function calls, "
                        f"{len(analysis_result.attribute_accesses)} attribute accesses")

            # Step 2: Validate against knowledge graph
            logger.info("Step 2: Validating against knowledge graph...")
            validation_result = await self.validator.validate_script(analysis_result)

            logger.info(f"Validation complete. Overall confidence: {validation_result.overall_confidence:.1%}")

            # Step 3: Generate comprehensive report
            logger.info("Step 3: Generating reports...")
            report = self.reporter.generate_comprehensive_report(validation_result)

            # Step 4: Save reports
            script_name = Path(script_path).stem

            if save_json:
                json_path = os.path.join(output_dir, f"{script_name}_hallucination_report.json")
                self.reporter.save_json_report(report, json_path)

            if save_markdown:
                md_path = os.path.join(output_dir, f"{script_name}_hallucination_report.md")
                self.reporter.save_markdown_report(report, md_path)

            # Step 5: Print summary
            if print_summary:
                self.reporter.print_summary(report)

            logger.info("Hallucination detection completed successfully")
            return report

        except Exception as e:
            logger.error(f"Error during hallucination detection: {str(e)}")
            raise

    async def batch_detect(self, script_paths: List[str],
                           output_dir: Optional[str] = None) -> List[dict]:
        """
        Detect hallucinations in multiple scripts.

        Args:
            script_paths: List of paths to Python scripts
            output_dir: Directory to save all reports

        Returns:
            List of validation reports (failed scripts are skipped, not
            represented in the list).
        """
        logger.info(f"Starting batch detection for {len(script_paths)} scripts")

        results = []
        for i, script_path in enumerate(script_paths, 1):
            logger.info(f"Processing script {i}/{len(script_paths)}: {script_path}")

            try:
                result = await self.detect_hallucinations(
                    script_path=script_path,
                    output_dir=output_dir,
                    print_summary=False  # Don't print individual summaries in batch mode
                )
                results.append(result)

            except Exception as e:
                logger.error(f"Failed to process {script_path}: {str(e)}")
                # Continue with other scripts
                continue

        # Print batch summary
        self._print_batch_summary(results)

        return results

    def _print_batch_summary(self, results: List[dict]):
        """Print an aggregated summary of batch processing results."""
        if not results:
            print("No scripts were successfully processed.")
            return

        print("\n" + "="*80)
        print("🚀 BATCH HALLUCINATION DETECTION SUMMARY")
        print("="*80)

        total_scripts = len(results)
        total_validations = sum(r['validation_summary']['total_validations'] for r in results)
        total_valid = sum(r['validation_summary']['valid_count'] for r in results)
        total_invalid = sum(r['validation_summary']['invalid_count'] for r in results)
        total_not_found = sum(r['validation_summary']['not_found_count'] for r in results)
        total_hallucinations = sum(len(r['hallucinations_detected']) for r in results)

        # total_scripts is guaranteed non-zero here (results is non-empty).
        avg_confidence = sum(r['validation_summary']['overall_confidence'] for r in results) / total_scripts

        print(f"Scripts Processed: {total_scripts}")
        print(f"Total Validations: {total_validations}")
        print(f"Average Confidence: {avg_confidence:.1%}")
        print(f"Total Hallucinations: {total_hallucinations}")

        # BUGFIX: total_validations can be 0 (e.g. every script was empty or
        # unparseable), which previously raised ZeroDivisionError here.
        denom = total_validations or 1
        print(f"\nAggregated Results:")
        print(f"  āœ… Valid: {total_valid} ({total_valid/denom:.1%})")
        print(f"  āŒ Invalid: {total_invalid} ({total_invalid/denom:.1%})")
        print(f"  šŸ” Not Found: {total_not_found} ({total_not_found/denom:.1%})")

        # Show worst performing scripts
        print(f"\n🚨 Scripts with Most Hallucinations:")
        sorted_results = sorted(results, key=lambda x: len(x['hallucinations_detected']), reverse=True)
        for result in sorted_results[:5]:
            script_name = Path(result['analysis_metadata']['script_path']).name
            hall_count = len(result['hallucinations_detected'])
            confidence = result['validation_summary']['overall_confidence']
            print(f"  - {script_name}: {hall_count} hallucinations ({confidence:.1%} confidence)")

        print("="*80)


async def main():
    """Command-line interface for the AI Hallucination Detector."""
    parser = argparse.ArgumentParser(
        description="Detect AI coding assistant hallucinations in Python scripts",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Analyze single script
  python ai_hallucination_detector.py script.py

  # Analyze multiple scripts
  python ai_hallucination_detector.py script1.py script2.py script3.py

  # Specify output directory
  python ai_hallucination_detector.py script.py --output-dir reports/

  # Skip markdown report
  python ai_hallucination_detector.py script.py --no-markdown
        """
    )

    parser.add_argument(
        'scripts',
        nargs='+',
        help='Python script(s) to analyze for hallucinations'
    )

    parser.add_argument(
        '--output-dir',
        help='Directory to save reports (defaults to script directory)'
    )

    parser.add_argument(
        '--no-json',
        action='store_true',
        help='Skip JSON report generation'
    )

    parser.add_argument(
        '--no-markdown',
        action='store_true',
        help='Skip Markdown report generation'
    )

    parser.add_argument(
        '--no-summary',
        action='store_true',
        help='Skip printing summary to console'
    )

    parser.add_argument(
        '--neo4j-uri',
        default=None,
        help='Neo4j URI (default: from environment NEO4J_URI)'
    )

    parser.add_argument(
        '--neo4j-user',
        default=None,
        help='Neo4j username (default: from environment NEO4J_USER)'
    )

    parser.add_argument(
        '--neo4j-password',
        default=None,
        help='Neo4j password (default: from environment NEO4J_PASSWORD)'
    )

    parser.add_argument(
        '--verbose',
        action='store_true',
        help='Enable verbose logging'
    )

    args = parser.parse_args()

    if args.verbose:
        # BUGFIX: this previously set the root logger to INFO, which is
        # already the default configured by basicConfig above, so --verbose
        # had no effect. DEBUG is what "verbose" is meant to enable.
        logging.getLogger().setLevel(logging.DEBUG)
        # Keep the chatty neo4j driver loggers quiet even in verbose mode.
        logging.getLogger('neo4j').setLevel(logging.WARNING)
        logging.getLogger('neo4j.pool').setLevel(logging.WARNING)
        logging.getLogger('neo4j.io').setLevel(logging.WARNING)

    # Load environment variables
    load_dotenv()

    # CLI flags win over environment variables; last-resort defaults below.
    neo4j_uri = args.neo4j_uri or os.environ.get('NEO4J_URI', 'bolt://localhost:7687')
    neo4j_user = args.neo4j_user or os.environ.get('NEO4J_USER', 'neo4j')
    neo4j_password = args.neo4j_password or os.environ.get('NEO4J_PASSWORD', 'password')

    # Refuse to run with the placeholder default password.
    if not neo4j_password or neo4j_password == 'password':
        logger.error("Please set NEO4J_PASSWORD environment variable or use --neo4j-password")
        sys.exit(1)

    # Initialize detector
    detector = AIHallucinationDetector(neo4j_uri, neo4j_user, neo4j_password)

    try:
        await detector.initialize()

        # Process scripts
        if len(args.scripts) == 1:
            # Single script mode
            await detector.detect_hallucinations(
                script_path=args.scripts[0],
                output_dir=args.output_dir,
                save_json=not args.no_json,
                save_markdown=not args.no_markdown,
                print_summary=not args.no_summary
            )
        else:
            # Batch mode
            await detector.batch_detect(
                script_paths=args.scripts,
                output_dir=args.output_dir
            )

    except KeyboardInterrupt:
        logger.info("Detection interrupted by user")
        sys.exit(1)

    except Exception as e:
        logger.error(f"Detection failed: {str(e)}")
        sys.exit(1)

    finally:
        # Always release the Neo4j driver, even on failure paths.
        await detector.close()


if __name__ == "__main__":
    asyncio.run(main())
MethodCall: + """Information about a method call""" + object_name: str + method_name: str + args: List[str] + kwargs: Dict[str, str] + line_number: int + object_type: Optional[str] = None # Inferred class type + + +@dataclass +class AttributeAccess: + """Information about attribute access""" + object_name: str + attribute_name: str + line_number: int + object_type: Optional[str] = None # Inferred class type + + +@dataclass +class FunctionCall: + """Information about a function call""" + function_name: str + args: List[str] + kwargs: Dict[str, str] + line_number: int + full_name: Optional[str] = None # Module.function_name + + +@dataclass +class ClassInstantiation: + """Information about class instantiation""" + variable_name: str + class_name: str + args: List[str] + kwargs: Dict[str, str] + line_number: int + full_class_name: Optional[str] = None # Module.ClassName + + +@dataclass +class AnalysisResult: + """Complete analysis results for a Python script""" + file_path: str + imports: List[ImportInfo] = field(default_factory=list) + class_instantiations: List[ClassInstantiation] = field(default_factory=list) + method_calls: List[MethodCall] = field(default_factory=list) + attribute_accesses: List[AttributeAccess] = field(default_factory=list) + function_calls: List[FunctionCall] = field(default_factory=list) + variable_types: Dict[str, str] = field(default_factory=dict) # variable_name -> class_type + errors: List[str] = field(default_factory=list) + + +class AIScriptAnalyzer: + """Analyzes AI-generated Python scripts for validation against knowledge graph""" + + def __init__(self): + self.import_map: Dict[str, str] = {} # alias -> actual_module_name + self.variable_types: Dict[str, str] = {} # variable_name -> class_type + self.context_manager_vars: Dict[str, Tuple[int, int, str]] = {} # var_name -> (start_line, end_line, type) + + def analyze_script(self, script_path: str) -> AnalysisResult: + """Analyze a Python script and extract all relevant information""" + 
        try:
            # Read the target script and parse it into an AST.
            with open(script_path, 'r', encoding='utf-8') as f:
                content = f.read()

            tree = ast.parse(content)
            result = AnalysisResult(file_path=script_path)

            # Reset state for new analysis: a reused analyzer instance must not
            # leak imports/variable types from a previously analyzed file.
            self.import_map.clear()
            self.variable_types.clear()
            self.context_manager_vars.clear()

            # Track processed nodes (by id()) to avoid duplicates: ast.walk
            # visits nested nodes again, so calls handled during assignment
            # processing must be skipped on the second encounter.
            self.processed_calls = set()
            self.method_call_attributes = set()

            # First pass: collect imports and build import map
            for node in ast.walk(tree):
                if isinstance(node, (ast.Import, ast.ImportFrom)):
                    self._extract_imports(node, result)

            # Second pass: analyze usage patterns
            for node in ast.walk(tree):
                self._analyze_node(node, result)

            # Set inferred types on method calls and attribute accesses
            self._infer_object_types(result)

            result.variable_types = self.variable_types.copy()

            return result

        except Exception as e:
            # Any failure (I/O error, SyntaxError, ...) is reported via
            # result.errors instead of propagating to the caller.
            error_msg = f"Failed to analyze script {script_path}: {str(e)}"
            logger.error(error_msg)
            result = AnalysisResult(file_path=script_path)
            result.errors.append(error_msg)
            return result

    def _extract_imports(self, node: ast.AST, result: AnalysisResult) -> None:
        """Extract import information and build import mapping"""
        line_num = getattr(node, 'lineno', 0)

        if isinstance(node, ast.Import):
            # `import a.b as c` -> import_map maps the bound name to the module path.
            for alias in node.names:
                import_name = alias.name
                alias_name = alias.asname or import_name

                result.imports.append(ImportInfo(
                    module=import_name,
                    name=import_name,
                    alias=alias.asname,
                    is_from_import=False,
                    line_number=line_num
                ))

                self.import_map[alias_name] = import_name

        elif isinstance(node, ast.ImportFrom):
            # node.module is None for relative imports like `from . import x`.
            module = node.module or ""
            for alias in node.names:
                import_name = alias.name
                alias_name = alias.asname or import_name

                result.imports.append(ImportInfo(
                    module=module,
                    name=import_name,
                    alias=alias.asname,
                    is_from_import=True,
                    line_number=line_num
                ))

                # Map alias to full module.name
                if module:
                    full_name = f"{module}.{import_name}"
                    self.import_map[alias_name] = full_name
                else:
                    self.import_map[alias_name] = import_name

    def _analyze_node(self, node: ast.AST, result: AnalysisResult) -> None:
        """Analyze individual AST nodes for usage patterns"""
        # NOTE(review): line_num is computed here but never used in this method.
        line_num = getattr(node, 'lineno', 0)

        # Assignments (class instantiations and method call results)
        if isinstance(node, ast.Assign):
            if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
                if isinstance(node.value, ast.Call):
                    # Check if it's a class instantiation or method call
                    if isinstance(node.value.func, ast.Name):
                        # Direct function/class call
                        self._extract_class_instantiation(node, result)
                        # Mark this call as processed to avoid duplicate processing
                        self.processed_calls.add(id(node.value))
                    elif isinstance(node.value.func, ast.Attribute):
                        # Method call - track the variable assignment for type inference
                        var_name = node.targets[0].id
                        self._track_method_result_assignment(node.value, var_name)
                        # Still process the method call
                        self._extract_method_call(node.value, result)
                        self.processed_calls.add(id(node.value))

        # AsyncWith statements (context managers)
        elif isinstance(node, ast.AsyncWith):
            self._handle_async_with(node, result)
        elif isinstance(node, ast.With):
            self._handle_with(node, result)

        # Method calls and function calls
        elif isinstance(node, ast.Call):
            # Skip if this call was already processed as part of an assignment
            if id(node) in self.processed_calls:
                return

            if isinstance(node.func, ast.Attribute):
                self._extract_method_call(node, result)
                # Mark this attribute as used in method call to avoid duplicate processing
                self.method_call_attributes.add(id(node.func))
            elif isinstance(node.func, ast.Name):
                # Check if this is likely a class instantiation (based on imported classes)
                func_name = node.func.id
                full_name = self._resolve_full_name(func_name)

                # If this is a known imported class, treat as class instantiation
                if self._is_likely_class_instantiation(func_name, full_name):
                    self._extract_nested_class_instantiation(node, result)
                else:
                    self._extract_function_call(node, result)

        # Attribute access (not in call context)
        elif isinstance(node, ast.Attribute):
            # Skip if this attribute was already processed as part of a method call
            if id(node) in self.method_call_attributes:
                return
            self._extract_attribute_access(node, result)

    def _extract_class_instantiation(self, node: ast.Assign, result: AnalysisResult) -> None:
        """Extract class instantiation from assignment"""
        target = node.targets[0]
        call = node.value
        line_num = getattr(node, 'lineno', 0)

        if isinstance(target, ast.Name) and isinstance(call, ast.Call):
            var_name = target.id
            class_name = self._get_name_from_call(call.func)

            if class_name:
                # Stringified positional and keyword arguments (display only).
                args = [self._get_arg_representation(arg) for arg in call.args]
                kwargs = {
                    kw.arg: self._get_arg_representation(kw.value)
                    for kw in call.keywords if kw.arg
                }

                # Resolve full class name using import map
                full_class_name = self._resolve_full_name(class_name)

                instantiation = ClassInstantiation(
                    variable_name=var_name,
                    class_name=class_name,
                    args=args,
                    kwargs=kwargs,
                    line_number=line_num,
                    full_class_name=full_class_name
                )

                result.class_instantiations.append(instantiation)

                # Track variable type for later method call analysis
                self.variable_types[var_name] = full_class_name or class_name

    def _extract_method_call(self, node: ast.Call, result: AnalysisResult) -> None:
        """Extract method call information"""
        if isinstance(node.func, ast.Attribute):
            line_num = getattr(node, 'lineno', 0)

            # Get object and method names
            obj_name = self._get_name_from_node(node.func.value)
            method_name = node.func.attr

            if obj_name and method_name:
                args = [self._get_arg_representation(arg) for arg in node.args]
                kwargs = {
                    kw.arg: self._get_arg_representation(kw.value)
                    for kw in node.keywords if kw.arg
                }

                method_call = MethodCall(
                    object_name=obj_name,
                    method_name=method_name,
                    args=args,
                    kwargs=kwargs,
                    line_number=line_num,
                    # May be None here; _infer_object_types fills in later.
                    object_type=self.variable_types.get(obj_name)
                )

                result.method_calls.append(method_call)

    def _extract_function_call(self, node: ast.Call, result: AnalysisResult) -> None:
        """Extract function call information"""
        if isinstance(node.func, ast.Name):
            line_num = getattr(node, 'lineno', 0)
            func_name = node.func.id

            args = [self._get_arg_representation(arg) for arg in node.args]
            kwargs = {
                kw.arg: self._get_arg_representation(kw.value)
                for kw in node.keywords if kw.arg
            }

            # Resolve full function name using import map
            full_func_name = self._resolve_full_name(func_name)

            function_call = FunctionCall(
                function_name=func_name,
                args=args,
                kwargs=kwargs,
                line_number=line_num,
                full_name=full_func_name
            )

            result.function_calls.append(function_call)

    def _extract_attribute_access(self, node: ast.Attribute, result: AnalysisResult) -> None:
        """Extract attribute access information"""
        line_num = getattr(node, 'lineno', 0)

        obj_name = self._get_name_from_node(node.value)
        attr_name = node.attr

        if obj_name and attr_name:
            attribute_access = AttributeAccess(
                object_name=obj_name,
                attribute_name=attr_name,
                line_number=line_num,
                object_type=self.variable_types.get(obj_name)
            )

            result.attribute_accesses.append(attribute_access)

    def _infer_object_types(self, result: AnalysisResult) -> None:
        """Update object types for method calls and attribute accesses"""
        for method_call in result.method_calls:
            if not method_call.object_type:
                # First check context manager variables
                obj_type = self._get_context_aware_type(method_call.object_name, method_call.line_number)
                if obj_type:
                    method_call.object_type = obj_type
                else:
                    method_call.object_type = self.variable_types.get(method_call.object_name)

        for attr_access in result.attribute_accesses:
            if not attr_access.object_type:
                # First check context manager variables
                obj_type = self._get_context_aware_type(attr_access.object_name, attr_access.line_number)
                if obj_type:
                    attr_access.object_type = obj_type
                else:
                    attr_access.object_type = self.variable_types.get(attr_access.object_name)

    def _get_context_aware_type(self, var_name: str, line_number: int) -> Optional[str]:
        """Get the type of a variable considering its context (e.g., async with scope)"""
        # context_manager_vars maps name -> (start_line, end_line, type); the
        # type only applies to usages inside that line range.
        if var_name in self.context_manager_vars:
            start_line, end_line, var_type = self.context_manager_vars[var_name]
            if start_line <= line_number <= end_line:
                return var_type
        return None

    def _get_name_from_call(self, node: ast.AST) -> Optional[str]:
        """Get the name from a call node (for class instantiation)"""
        # NOTE(review): identical to _get_name_from_node below; kept separate,
        # presumably for call-site readability.
        if isinstance(node, ast.Name):
            return node.id
        elif isinstance(node, ast.Attribute):
            value_name = self._get_name_from_node(node.value)
            if value_name:
                return f"{value_name}.{node.attr}"
        return None

    def _get_name_from_node(self, node: ast.AST) -> Optional[str]:
        """Get string representation of a node (for object names)"""
        if isinstance(node, ast.Name):
            return node.id
        elif isinstance(node, ast.Attribute):
            # Recursively build dotted names like `a.b.c`.
            value_name = self._get_name_from_node(node.value)
            if value_name:
                return f"{value_name}.{node.attr}"
        return None

    def _get_arg_representation(self, node: ast.AST) -> str:
        """Get string representation of an argument"""
        if isinstance(node, ast.Constant):
            return repr(node.value)
        elif isinstance(node, ast.Name):
            return node.id
        elif isinstance(node, ast.Attribute):
            return self._get_name_from_node(node) or ""
        elif isinstance(node, ast.Call):
            func_name = self._get_name_from_call(node.func)
            return f"{func_name}(...)" if func_name else "call(...)"
        else:
            # Fallback: show the AST node class, e.g. "<BinOp>".
            return f"<{type(node).__name__}>"

    def _is_likely_class_instantiation(self, func_name: str, full_name: Optional[str]) -> bool:
        """Determine if a function call is likely a class instantiation"""
        # Check if it's a known imported class (classes typically start with uppercase)
        if func_name and func_name[0].isupper():
            return True

        # Check if the full name suggests a class (contains known class patterns)
        if full_name:
            # Common class patterns in module names
            class_patterns = [
                'Model', 'Provider', 'Client', 'Agent', 'Manager', 'Handler',
                'Builder', 'Factory', 'Service', 'Controller', 'Processor'
            ]
            return any(pattern in full_name for pattern in class_patterns)

        return False

    def _extract_nested_class_instantiation(self, node: ast.Call, result: AnalysisResult) -> None:
        """Extract class instantiation that's not in direct assignment (e.g., as parameter)"""
        line_num = getattr(node, 'lineno', 0)

        if isinstance(node.func, ast.Name):
            class_name = node.func.id

            args = [self._get_arg_representation(arg) for arg in node.args]
            kwargs = {
                kw.arg: self._get_arg_representation(kw.value)
                for kw in node.keywords if kw.arg
            }

            # Resolve full class name using import map
            full_class_name = self._resolve_full_name(class_name)

            # Use a synthetic variable name since this isn't assigned to a variable
            var_name = f"<{class_name.lower()}_instance>"

            instantiation = ClassInstantiation(
                variable_name=var_name,
                class_name=class_name,
                args=args,
                kwargs=kwargs,
                line_number=line_num,
                full_class_name=full_class_name
            )

            result.class_instantiations.append(instantiation)

    def _track_method_result_assignment(self, call_node: ast.Call, var_name: str) -> None:
        """Track when a variable is assigned the result of a method call"""
        if isinstance(call_node.func, ast.Attribute):
            # For now, we'll use a generic type hint for method results
            # In a more sophisticated system, we could look up the return type
            self.variable_types[var_name] = "method_result"

    def _handle_async_with(self, node: ast.AsyncWith, result: AnalysisResult) -> None:
        """Handle async with statements and track context manager variables"""
        for item in node.items:
            if item.optional_vars and isinstance(item.optional_vars, ast.Name):
                var_name = item.optional_vars.id

                # If the context manager is a method call, track the result type
                if isinstance(item.context_expr, ast.Call) and isinstance(item.context_expr.func, ast.Attribute):
                    # Extract and process the method call
                    self._extract_method_call(item.context_expr, result)
                    self.processed_calls.add(id(item.context_expr))

                    # Track context manager scope for pydantic_ai run_stream calls
                    obj_name = self._get_name_from_node(item.context_expr.func.value)
                    method_name = item.context_expr.func.attr

                    if (obj_name and obj_name in self.variable_types and
                        'pydantic_ai' in str(self.variable_types[obj_name]) and
                        method_name == 'run_stream'):

                        # Calculate the scope of this async with block
                        # (end_lineno fallback is a heuristic 50-line estimate).
                        start_line = getattr(node, 'lineno', 0)
                        end_line = getattr(node, 'end_lineno', start_line + 50)  # fallback estimate

                        # For run_stream, the return type is specifically StreamedRunResult
                        # This is the actual return type, not a generic placeholder
                        self.context_manager_vars[var_name] = (start_line, end_line, "pydantic_ai.StreamedRunResult")

    def _handle_with(self, node: ast.With, result: AnalysisResult) -> None:
        """Handle regular with statements and track context manager variables"""
        for item in node.items:
            if item.optional_vars and isinstance(item.optional_vars, ast.Name):
                var_name = item.optional_vars.id

                # If the context manager is a method call, track the result type
                if isinstance(item.context_expr, ast.Call) and isinstance(item.context_expr.func, ast.Attribute):
                    # Extract and process the method call
                    self._extract_method_call(item.context_expr, result)
                    self.processed_calls.add(id(item.context_expr))

                    # Track basic type information
                    self.variable_types[var_name] = "context_manager_result"

    def _resolve_full_name(self, name: str) -> Optional[str]:
        """Resolve a name to its full module.name using import map"""
        # Check if it's a direct import mapping
        if name in self.import_map:
            return self.import_map[name]

        # Check if it's a dotted name with first part in import map
        parts = name.split('.')
        if len(parts) > 1 and parts[0] in self.import_map:
            base_module = self.import_map[parts[0]]
            return f"{base_module}.{'.'.join(parts[1:])}"

        return None


def analyze_ai_script(script_path: str) -> AnalysisResult:
    """Convenience function to analyze a single AI-generated script"""
    analyzer = AIScriptAnalyzer()
    return analyzer.analyze_script(script_path)


if __name__ == "__main__":
    # Example usage: analyze one script given on the command line and print
    # a summary of everything that was extracted.
    import sys

    if len(sys.argv) != 2:
        print("Usage: python ai_script_analyzer.py ")
        sys.exit(1)

    script_path = sys.argv[1]
    result = analyze_ai_script(script_path)

    print(f"Analysis Results for: {result.file_path}")
    print(f"Imports: {len(result.imports)}")
    print(f"Class Instantiations: {len(result.class_instantiations)}")
    print(f"Method Calls: {len(result.method_calls)}")
    print(f"Function Calls: {len(result.function_calls)}")
    print(f"Attribute Accesses: {len(result.attribute_accesses)}")

    if result.errors:
        print(f"Errors: {result.errors}")
"""
Hallucination Reporter

Generates comprehensive reports about AI coding assistant hallucinations
detected in Python scripts. Supports multiple output formats.
"""

import json
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Any, Optional

from knowledge_graph_validator import (
    ScriptValidationResult, ValidationStatus, ValidationResult
)

logger = logging.getLogger(__name__)


class HallucinationReporter:
    """Generates reports about detected hallucinations"""

    def __init__(self):
        # One timestamp shared by every report this instance produces.
        self.report_timestamp = datetime.now(timezone.utc)

    def generate_comprehensive_report(self, validation_result: ScriptValidationResult) -> Dict[str, Any]:
        """Generate a comprehensive report in JSON format.

        Only items belonging to libraries present in the knowledge graph are
        included; external-library usage is filtered out so the report focuses
        on verifiable claims.
        """

        # Categorize validations by status (knowledge graph items only)
        valid_items = []
        invalid_items = []
        uncertain_items = []
        not_found_items = []

        # Process imports (only knowledge graph ones)
        for val in validation_result.import_validations:
            if not val.validation.details.get('in_knowledge_graph', False):
                continue  # Skip external libraries
            item = {
                'type': 'IMPORT',
                'name': val.import_info.module,
                'line': val.import_info.line_number,
                'status': val.validation.status.value,
                'confidence': val.validation.confidence,
                'message': val.validation.message,
                'details': {
                    'is_from_import': val.import_info.is_from_import,
                    'alias': val.import_info.alias,
                    'available_classes': val.available_classes,
                    'available_functions': val.available_functions
                }
            }
            self._categorize_item(item, val.validation.status, valid_items, invalid_items, uncertain_items, not_found_items)

        # Process classes (only knowledge graph ones)
        for val in validation_result.class_validations:
            class_name = val.class_instantiation.full_class_name or val.class_instantiation.class_name
            if not self._is_from_knowledge_graph(class_name, validation_result):
                continue  # Skip external classes
            item = {
                'type': 'CLASS_INSTANTIATION',
                'name': val.class_instantiation.class_name,
                'full_name': val.class_instantiation.full_class_name,
                'variable': val.class_instantiation.variable_name,
                'line': val.class_instantiation.line_number,
                'status': val.validation.status.value,
                'confidence': val.validation.confidence,
                'message': val.validation.message,
                'details': {
                    'args_provided': val.class_instantiation.args,
                    'kwargs_provided': list(val.class_instantiation.kwargs.keys()),
                    'constructor_params': val.constructor_params,
                    'parameter_validation': self._serialize_validation_result(val.parameter_validation) if val.parameter_validation else None
                }
            }
            self._categorize_item(item, val.validation.status, valid_items, invalid_items, uncertain_items, not_found_items)

        # Track reported items to avoid duplicates (the analyzer can surface
        # the same method both as a call and as an attribute access).
        reported_items = set()

        # Process methods (only knowledge graph ones)
        for val in validation_result.method_validations:
            if not (val.method_call.object_type and self._is_from_knowledge_graph(val.method_call.object_type, validation_result)):
                continue  # Skip external methods

            # Create unique key to avoid duplicates
            key = (val.method_call.line_number, val.method_call.method_name, val.method_call.object_type)
            if key not in reported_items:
                reported_items.add(key)
                item = {
                    'type': 'METHOD_CALL',
                    'name': val.method_call.method_name,
                    'object': val.method_call.object_name,
                    'object_type': val.method_call.object_type,
                    'line': val.method_call.line_number,
                    'status': val.validation.status.value,
                    'confidence': val.validation.confidence,
                    'message': val.validation.message,
                    'details': {
                        'args_provided': val.method_call.args,
                        'kwargs_provided': list(val.method_call.kwargs.keys()),
                        'expected_params': val.expected_params,
                        'parameter_validation': self._serialize_validation_result(val.parameter_validation) if val.parameter_validation else None,
                        'suggestions': val.validation.suggestions
                    }
                }
                self._categorize_item(item, val.validation.status, valid_items, invalid_items, uncertain_items, not_found_items)

        # Process attributes (only knowledge graph ones) - but skip if already reported as method
        for val in validation_result.attribute_validations:
            if not (val.attribute_access.object_type and self._is_from_knowledge_graph(val.attribute_access.object_type, validation_result)):
                continue  # Skip external attributes

            # Create unique key - if this was already reported as a method, skip it
            key = (val.attribute_access.line_number, val.attribute_access.attribute_name, val.attribute_access.object_type)
            if key not in reported_items:
                reported_items.add(key)
                item = {
                    'type': 'ATTRIBUTE_ACCESS',
                    'name': val.attribute_access.attribute_name,
                    'object': val.attribute_access.object_name,
                    'object_type': val.attribute_access.object_type,
                    'line': val.attribute_access.line_number,
                    'status': val.validation.status.value,
                    'confidence': val.validation.confidence,
                    'message': val.validation.message,
                    'details': {
                        'expected_type': val.expected_type
                    }
                }
                self._categorize_item(item, val.validation.status, valid_items, invalid_items, uncertain_items, not_found_items)

        # Process functions (only knowledge graph ones)
        for val in validation_result.function_validations:
            if not (val.function_call.full_name and self._is_from_knowledge_graph(val.function_call.full_name, validation_result)):
                continue  # Skip external functions
            item = {
                'type': 'FUNCTION_CALL',
                'name': val.function_call.function_name,
                'full_name': val.function_call.full_name,
                'line': val.function_call.line_number,
                'status': val.validation.status.value,
                'confidence': val.validation.confidence,
                'message': val.validation.message,
                'details': {
                    'args_provided': val.function_call.args,
                    'kwargs_provided': list(val.function_call.kwargs.keys()),
                    'expected_params': val.expected_params,
                    'parameter_validation': self._serialize_validation_result(val.parameter_validation) if val.parameter_validation else None
                }
            }
            self._categorize_item(item, val.validation.status, valid_items, invalid_items, uncertain_items, not_found_items)

        # Create library summary
        library_summary = self._create_library_summary(validation_result)

        # Generate report
        report = {
            'analysis_metadata': {
                'script_path': validation_result.script_path,
                'analysis_timestamp': self.report_timestamp.isoformat(),
                'total_imports': len(validation_result.import_validations),
                'total_classes': len(validation_result.class_validations),
                'total_methods': len(validation_result.method_validations),
                'total_attributes': len(validation_result.attribute_validations),
                'total_functions': len(validation_result.function_validations)
            },
            'validation_summary': {
                'overall_confidence': validation_result.overall_confidence,
                'total_validations': len(valid_items) + len(invalid_items) + len(uncertain_items) + len(not_found_items),
                'valid_count': len(valid_items),
                'invalid_count': len(invalid_items),
                'uncertain_count': len(uncertain_items),
                'not_found_count': len(not_found_items),
                # NOTE(review): uncertain items are excluded from the
                # denominator here (unlike total_validations) — presumably
                # intentional; confirm before changing.
                'hallucination_rate': len(invalid_items + not_found_items) / max(1, len(valid_items) + len(invalid_items) + len(not_found_items))
            },
            'libraries_analyzed': library_summary,
            'validation_details': {
                'valid_items': valid_items,
                'invalid_items': invalid_items,
                'uncertain_items': uncertain_items,
                'not_found_items': not_found_items
            },
            'hallucinations_detected': validation_result.hallucinations_detected,
            'recommendations': self._generate_recommendations(validation_result)
        }

        return report

    def _is_from_knowledge_graph(self, item_name: str, validation_result) -> bool:
        """Check if an item is from a knowledge graph module"""
        if not item_name:
            return False

        # Get knowledge graph modules from import validations
        kg_modules = set()
        for val in validation_result.import_validations:
            if val.validation.details.get('in_knowledge_graph', False):
                kg_modules.add(val.import_info.module)
                if '.' in val.import_info.module:
                    kg_modules.add(val.import_info.module.split('.')[0])

        # Check if the item belongs to any knowledge graph module
        if '.' in item_name:
            base_module = item_name.split('.')[0]
            return base_module in kg_modules

        return any(item_name in module or module.endswith(item_name) for module in kg_modules)

    def _serialize_validation_result(self, validation_result) -> Optional[Dict[str, Any]]:
        """Convert ValidationResult to JSON-serializable dictionary.

        Returns None when given None (fix: annotation now reflects that).
        """
        if validation_result is None:
            return None

        return {
            'status': validation_result.status.value,
            'confidence': validation_result.confidence,
            'message': validation_result.message,
            'details': validation_result.details,
            'suggestions': validation_result.suggestions
        }

    def _categorize_item(self, item: Dict[str, Any], status: ValidationStatus,
                        valid_items: List, invalid_items: List, uncertain_items: List, not_found_items: List) -> None:
        """Append the item to the bucket matching its validation status."""
        if status == ValidationStatus.VALID:
            valid_items.append(item)
        elif status == ValidationStatus.INVALID:
            invalid_items.append(item)
        elif status == ValidationStatus.UNCERTAIN:
            uncertain_items.append(item)
        elif status == ValidationStatus.NOT_FOUND:
            not_found_items.append(item)

    def _create_library_summary(self, validation_result: ScriptValidationResult) -> List[Dict[str, Any]]:
        """Create summary of libraries analyzed"""
        library_stats = {}

        # Aggregate stats by library/module
        for val in validation_result.import_validations:
            module = val.import_info.module
            if module not in library_stats:
                library_stats[module] = {
                    'module_name': module,
                    'import_status': val.validation.status.value,
                    'import_confidence': val.validation.confidence,
                    'classes_used': [],
                    'methods_called': [],
                    'attributes_accessed': [],
                    'functions_called': []
                }

        # Add class usage (matched by the module prefix of the full name)
        for val in validation_result.class_validations:
            class_name = val.class_instantiation.class_name
            full_name = val.class_instantiation.full_class_name

            # Try to match to library
            if full_name:
                parts = full_name.split('.')
                if len(parts) > 1:
                    module = '.'.join(parts[:-1])
                    if module in library_stats:
                        library_stats[module]['classes_used'].append({
                            'class_name': class_name,
                            'status': val.validation.status.value,
                            'confidence': val.validation.confidence
                        })

        # Add method usage
        for val in validation_result.method_validations:
            method_name = val.method_call.method_name
            object_type = val.method_call.object_type

            if object_type:
                parts = object_type.split('.')
                if len(parts) > 1:
                    module = '.'.join(parts[:-1])
                    if module in library_stats:
                        library_stats[module]['methods_called'].append({
                            'method_name': method_name,
                            'class_name': parts[-1],
                            'status': val.validation.status.value,
                            'confidence': val.validation.confidence
                        })

        # Add attribute usage
        for val in validation_result.attribute_validations:
            attr_name = val.attribute_access.attribute_name
            object_type = val.attribute_access.object_type

            if object_type:
                parts = object_type.split('.')
                if len(parts) > 1:
                    module = '.'.join(parts[:-1])
                    if module in library_stats:
                        library_stats[module]['attributes_accessed'].append({
                            'attribute_name': attr_name,
                            'class_name': parts[-1],
                            'status': val.validation.status.value,
                            'confidence': val.validation.confidence
                        })

        # Add function usage
        for val in validation_result.function_validations:
            func_name = val.function_call.function_name
            full_name = val.function_call.full_name

            if full_name:
                parts = full_name.split('.')
                if len(parts) > 1:
                    module = '.'.join(parts[:-1])
                    if module in library_stats:
                        library_stats[module]['functions_called'].append({
                            'function_name': func_name,
                            'status': val.validation.status.value,
                            'confidence': val.validation.confidence
                        })

        return list(library_stats.values())

    def _generate_recommendations(self, validation_result: ScriptValidationResult) -> List[str]:
        """Generate recommendations based on validation results"""
        recommendations = []

        # Only count actual hallucinations (from knowledge graph libraries)
        kg_hallucinations = list(validation_result.hallucinations_detected)

        if kg_hallucinations:
            method_issues = [h for h in kg_hallucinations if h['type'] == 'METHOD_NOT_FOUND']
            attr_issues = [h for h in kg_hallucinations if h['type'] == 'ATTRIBUTE_NOT_FOUND']
            param_issues = [h for h in kg_hallucinations if h['type'] == 'INVALID_PARAMETERS']

            if method_issues:
                recommendations.append(
                    f"Found {len(method_issues)} non-existent methods in knowledge graph libraries. "
                    "Consider checking the official documentation for correct method names."
                )

            if attr_issues:
                recommendations.append(
                    f"Found {len(attr_issues)} non-existent attributes in knowledge graph libraries. "
                    "Verify attribute names against the class documentation."
                )

            if param_issues:
                recommendations.append(
                    f"Found {len(param_issues)} parameter mismatches in knowledge graph libraries. "
                    "Check function signatures for correct parameter names and types."
                )
        else:
            recommendations.append(
                "No hallucinations detected in knowledge graph libraries. "
                "External library usage appears to be working as expected."
            )

        if validation_result.overall_confidence < 0.7:
            recommendations.append(
                "Overall confidence is moderate. Most validations were for external libraries not in the knowledge graph."
            )

        return recommendations

    def save_json_report(self, report: Dict[str, Any], output_path: str) -> None:
        """Save report as JSON file"""
        with open(output_path, 'w', encoding='utf-8') as f:
            json.dump(report, f, indent=2, ensure_ascii=False)

        logger.info(f"JSON report saved to: {output_path}")

    def save_markdown_report(self, report: Dict[str, Any], output_path: str) -> None:
        """Save report as Markdown file"""
        md_content = self._generate_markdown_content(report)

        with open(output_path, 'w', encoding='utf-8') as f:
            f.write(md_content)

        logger.info(f"Markdown report saved to: {output_path}")

    def _generate_markdown_content(self, report: Dict[str, Any]) -> str:
        """Generate Markdown content from report"""
        md = []

        # Header
        md.append("# AI Hallucination Detection Report")
        md.append("")
        md.append(f"**Script:** `{report['analysis_metadata']['script_path']}`")
        md.append(f"**Analysis Date:** {report['analysis_metadata']['analysis_timestamp']}")
        md.append(f"**Overall Confidence:** {report['validation_summary']['overall_confidence']:.2%}")
        md.append("")

        # Summary
        summary = report['validation_summary']
        # Fix: guard the denominator so an empty report (zero validations)
        # does not raise ZeroDivisionError; the counts all render as 0 anyway.
        total = summary['total_validations'] or 1
        md.append("## Summary")
        md.append("")
        md.append(f"- **Total Validations:** {summary['total_validations']}")
        md.append(f"- **Valid:** {summary['valid_count']} ({summary['valid_count']/total:.1%})")
        md.append(f"- **Invalid:** {summary['invalid_count']} ({summary['invalid_count']/total:.1%})")
        md.append(f"- **Not Found:** {summary['not_found_count']} ({summary['not_found_count']/total:.1%})")
        md.append(f"- **Uncertain:** {summary['uncertain_count']} ({summary['uncertain_count']/total:.1%})")
        md.append(f"- **Hallucination Rate:** {summary['hallucination_rate']:.1%}")
        md.append("")

        # Hallucinations
        if report['hallucinations_detected']:
            md.append("## 🚨 Hallucinations Detected")
            md.append("")
            for i, hallucination in enumerate(report['hallucinations_detected'], 1):
                md.append(f"### {i}. {hallucination['type'].replace('_', ' ').title()}")
                md.append(f"**Location:** {hallucination['location']}")
                md.append(f"**Description:** {hallucination['description']}")
                if hallucination.get('suggestion'):
                    md.append(f"**Suggestion:** {hallucination['suggestion']}")
                md.append("")

        # Libraries
        if report['libraries_analyzed']:
            md.append("## šŸ“š Libraries Analyzed")
            md.append("")
            for lib in report['libraries_analyzed']:
                md.append(f"### {lib['module_name']}")
                md.append(f"**Import Status:** {lib['import_status']}")
                md.append(f"**Import Confidence:** {lib['import_confidence']:.2%}")

                if lib['classes_used']:
                    md.append("**Classes Used:**")
                    for cls in lib['classes_used']:
                        status_emoji = "āœ…" if cls['status'] == 'VALID' else "āŒ"
                        md.append(f"  - {status_emoji} `{cls['class_name']}` ({cls['confidence']:.1%})")

                if lib['methods_called']:
                    md.append("**Methods Called:**")
                    for method in lib['methods_called']:
                        status_emoji = "āœ…" if method['status'] == 'VALID' else "āŒ"
                        md.append(f"  - {status_emoji} `{method['class_name']}.{method['method_name']}()` ({method['confidence']:.1%})")

                if lib['attributes_accessed']:
                    md.append("**Attributes Accessed:**")
                    for attr in lib['attributes_accessed']:
                        status_emoji = "āœ…" if attr['status'] == 'VALID' else "āŒ"
                        md.append(f"  - {status_emoji} `{attr['class_name']}.{attr['attribute_name']}` ({attr['confidence']:.1%})")

                if lib['functions_called']:
                    md.append("**Functions Called:**")
                    for func in lib['functions_called']:
                        status_emoji = "āœ…" if func['status'] == 'VALID' else "āŒ"
                        md.append(f"  - {status_emoji} `{func['function_name']}()` ({func['confidence']:.1%})")

                md.append("")

        # Recommendations
        if report['recommendations']:
            md.append("## šŸ’” Recommendations")
            md.append("")
            for rec in report['recommendations']:
                md.append(f"- {rec}")
            md.append("")

        # Detailed Results
        md.append("## šŸ“‹ Detailed Validation Results")
        md.append("")

        # Invalid items
        invalid_items = report['validation_details']['invalid_items']
        if invalid_items:
            md.append("### āŒ Invalid Items")
            md.append("")
            for item in invalid_items:
                md.append(f"- **{item['type']}** `{item['name']}` (Line {item['line']}) - {item['message']}")
            md.append("")

        # Not found items
        not_found_items = report['validation_details']['not_found_items']
        if not_found_items:
            md.append("### šŸ” Not Found Items")
            md.append("")
            for item in not_found_items:
                md.append(f"- **{item['type']}** `{item['name']}` (Line {item['line']}) - {item['message']}")
            md.append("")

        # Valid items (sample)
        valid_items = report['validation_details']['valid_items']
        if valid_items:
            md.append("### āœ… Valid Items (Sample)")
            md.append("")
            for item in valid_items[:10]:  # Show first 10
                md.append(f"- **{item['type']}** `{item['name']}` (Line {item['line']}) - {item['message']}")
            if len(valid_items) > 10:
                md.append(f"- ... and {len(valid_items) - 10} more valid items")
            md.append("")

        return "\n".join(md)

    def print_summary(self, report: Dict[str, Any]) -> None:
        """Print a concise summary to console"""
        print("\n" + "="*80)
        print("šŸ¤– AI HALLUCINATION DETECTION REPORT")
        print("="*80)

        print(f"Script: {report['analysis_metadata']['script_path']}")
        print(f"Overall Confidence: {report['validation_summary']['overall_confidence']:.1%}")

        summary = report['validation_summary']
        print(f"\nValidation Results:")
        print(f"  āœ… Valid: {summary['valid_count']}")
        print(f"  āŒ Invalid: {summary['invalid_count']}")
        print(f"  šŸ” Not Found: {summary['not_found_count']}")
        print(f"  ā“ Uncertain: {summary['uncertain_count']}")
        print(f"  šŸ“Š Hallucination Rate: {summary['hallucination_rate']:.1%}")

        if report['hallucinations_detected']:
            print(f"\n🚨 {len(report['hallucinations_detected'])} Hallucinations Detected:")
            for hall in report['hallucinations_detected'][:5]:  # Show first 5
                print(f"  - {hall['type'].replace('_', ' ').title()} at {hall['location']}")
                print(f"    {hall['description']}")

        if report['recommendations']:
            print(f"\nšŸ’” Recommendations:")
            for rec in report['recommendations'][:3]:  # Show first 3
                print(f"  - {rec}")

        print("="*80)
+""" + +import asyncio +import logging +from typing import Dict, List, Optional, Set, Tuple, Any +from dataclasses import dataclass, field +from enum import Enum +from neo4j import AsyncGraphDatabase + +from ai_script_analyzer import ( + AnalysisResult, ImportInfo, MethodCall, AttributeAccess, + FunctionCall, ClassInstantiation +) + +logger = logging.getLogger(__name__) + + +class ValidationStatus(Enum): + VALID = "VALID" + INVALID = "INVALID" + UNCERTAIN = "UNCERTAIN" + NOT_FOUND = "NOT_FOUND" + + +@dataclass +class ValidationResult: + """Result of validating a single element""" + status: ValidationStatus + confidence: float # 0.0 to 1.0 + message: str + details: Dict[str, Any] = field(default_factory=dict) + suggestions: List[str] = field(default_factory=list) + + +@dataclass +class ImportValidation: + """Validation result for an import""" + import_info: ImportInfo + validation: ValidationResult + available_classes: List[str] = field(default_factory=list) + available_functions: List[str] = field(default_factory=list) + + +@dataclass +class MethodValidation: + """Validation result for a method call""" + method_call: MethodCall + validation: ValidationResult + expected_params: List[str] = field(default_factory=list) + actual_params: List[str] = field(default_factory=list) + parameter_validation: ValidationResult = None + + +@dataclass +class AttributeValidation: + """Validation result for attribute access""" + attribute_access: AttributeAccess + validation: ValidationResult + expected_type: Optional[str] = None + + +@dataclass +class FunctionValidation: + """Validation result for function call""" + function_call: FunctionCall + validation: ValidationResult + expected_params: List[str] = field(default_factory=list) + actual_params: List[str] = field(default_factory=list) + parameter_validation: ValidationResult = None + + +@dataclass +class ClassValidation: + """Validation result for class instantiation""" + class_instantiation: ClassInstantiation + validation: 
@dataclass
class ClassValidation:
    """Validation result for class instantiation"""
    class_instantiation: ClassInstantiation
    validation: ValidationResult
    constructor_params: List[str] = field(default_factory=list)
    # None when __init__ parameter info was unavailable
    parameter_validation: Optional[ValidationResult] = None


@dataclass
class ScriptValidationResult:
    """Complete validation results for a script"""
    script_path: str
    analysis_result: AnalysisResult
    import_validations: List[ImportValidation] = field(default_factory=list)
    class_validations: List[ClassValidation] = field(default_factory=list)
    method_validations: List[MethodValidation] = field(default_factory=list)
    attribute_validations: List[AttributeValidation] = field(default_factory=list)
    function_validations: List[FunctionValidation] = field(default_factory=list)
    overall_confidence: float = 0.0
    hallucinations_detected: List[Dict[str, Any]] = field(default_factory=list)


class KnowledgeGraphValidator:
    """Validates code against Neo4j knowledge graph"""

    def __init__(self, neo4j_uri: str, neo4j_user: str, neo4j_password: str):
        """Store connection settings; the driver is created lazily in initialize().

        Args:
            neo4j_uri: Bolt/neo4j URI of the database.
            neo4j_user: Username for authentication.
            neo4j_password: Password for authentication.
        """
        self.neo4j_uri = neo4j_uri
        self.neo4j_user = neo4j_user
        self.neo4j_password = neo4j_password
        self.driver = None

        # Cache for performance (one process-lifetime cache per lookup kind)
        self.module_cache: Dict[str, List[str]] = {}
        self.class_cache: Dict[str, Dict[str, Any]] = {}
        self.method_cache: Dict[str, List[Dict[str, Any]]] = {}
        self.repo_cache: Dict[str, str] = {}  # module_name -> repo_name
        self.knowledge_graph_modules: Set[str] = set()  # Track modules in knowledge graph

    async def initialize(self):
        """Initialize Neo4j connection"""
        self.driver = AsyncGraphDatabase.driver(
            self.neo4j_uri,
            auth=(self.neo4j_user, self.neo4j_password)
        )
        logger.info("Knowledge graph validator initialized")

    async def close(self):
        """Close Neo4j connection"""
        if self.driver:
            await self.driver.close()
    async def validate_script(self, analysis_result: AnalysisResult) -> ScriptValidationResult:
        """Validate entire script analysis against knowledge graph.

        NOTE: imports must be validated first — that pass populates
        self.knowledge_graph_modules, which every later pass consults via
        _is_from_knowledge_graph() to decide what to validate at all.
        """
        result = ScriptValidationResult(
            script_path=analysis_result.file_path,
            analysis_result=analysis_result
        )

        # Validate imports first (builds context for other validations)
        result.import_validations = await self._validate_imports(analysis_result.imports)

        # Validate class instantiations
        result.class_validations = await self._validate_class_instantiations(
            analysis_result.class_instantiations
        )

        # Validate method calls
        result.method_validations = await self._validate_method_calls(
            analysis_result.method_calls
        )

        # Validate attribute accesses
        result.attribute_validations = await self._validate_attribute_accesses(
            analysis_result.attribute_accesses
        )

        # Validate function calls
        result.function_validations = await self._validate_function_calls(
            analysis_result.function_calls
        )

        # Calculate overall confidence and detect hallucinations
        result.overall_confidence = self._calculate_overall_confidence(result)
        result.hallucinations_detected = self._detect_hallucinations(result)

        return result

    async def _validate_imports(self, imports: List[ImportInfo]) -> List[ImportValidation]:
        """Validate all imports against knowledge graph"""
        validations = []

        for import_info in imports:
            validation = await self._validate_single_import(import_info)
            validations.append(validation)

        return validations

    async def _validate_single_import(self, import_info: ImportInfo) -> ImportValidation:
        """Validate a single import.

        Graph hits are recorded in self.knowledge_graph_modules; misses are
        treated as external libraries (UNCERTAIN), not as errors.
        """
        # Determine module to search for
        search_module = import_info.module if import_info.is_from_import else import_info.name

        # Check cache first
        if search_module in self.module_cache:
            available_files = self.module_cache[search_module]
        else:
            # Query Neo4j for matching modules
            available_files = await self._find_modules(search_module)
            self.module_cache[search_module] = available_files

        if available_files:
            # Get available classes and functions from the module
            classes, functions = await self._get_module_contents(search_module)

            # Track this module as being in the knowledge graph
            self.knowledge_graph_modules.add(search_module)

            # Also track the base module for "from X.Y.Z import ..." patterns
            if '.' in search_module:
                base_module = search_module.split('.')[0]
                self.knowledge_graph_modules.add(base_module)

            validation = ValidationResult(
                status=ValidationStatus.VALID,
                confidence=0.9,
                message=f"Module '{search_module}' found in knowledge graph",
                details={"matched_files": available_files, "in_knowledge_graph": True}
            )

            return ImportValidation(
                import_info=import_info,
                validation=validation,
                available_classes=classes,
                available_functions=functions
            )
        else:
            # External library - mark as such but don't treat as error
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.8,  # High confidence it's external, not an error
                message=f"Module '{search_module}' is external (not in knowledge graph)",
                details={"could_be_external": True, "in_knowledge_graph": False}
            )

            return ImportValidation(
                import_info=import_info,
                validation=validation
            )
    async def _validate_class_instantiations(self, instantiations: List[ClassInstantiation]) -> List[ClassValidation]:
        """Validate class instantiations"""
        validations = []

        for instantiation in instantiations:
            validation = await self._validate_single_class_instantiation(instantiation)
            validations.append(validation)

        return validations

    async def _validate_single_class_instantiation(self, instantiation: ClassInstantiation) -> ClassValidation:
        """Validate a single class instantiation.

        Checks that the class exists in the graph and, when an __init__
        signature is recorded, that the provided args/kwargs fit it.
        Classes outside the knowledge graph are skipped (UNCERTAIN).
        """
        class_name = instantiation.full_class_name or instantiation.class_name

        # Skip validation for classes not from knowledge graph
        if not self._is_from_knowledge_graph(class_name):
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.8,
                message=f"Skipping validation: '{class_name}' is not from knowledge graph"
            )
            return ClassValidation(
                class_instantiation=instantiation,
                validation=validation
            )

        # Find class in knowledge graph
        class_info = await self._find_class(class_name)

        if not class_info:
            validation = ValidationResult(
                status=ValidationStatus.NOT_FOUND,
                confidence=0.2,
                message=f"Class '{class_name}' not found in knowledge graph"
            )
            return ClassValidation(
                class_instantiation=instantiation,
                validation=validation
            )

        # Check constructor parameters (look for __init__ method)
        init_method = await self._find_method(class_name, "__init__")

        if init_method:
            param_validation = self._validate_parameters(
                expected_params=init_method.get('params_list', []),
                provided_args=instantiation.args,
                provided_kwargs=instantiation.kwargs
            )
        else:
            param_validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.5,
                message="Constructor parameters not found"
            )

        # Use parameter validation result if it failed
        if param_validation.status == ValidationStatus.INVALID:
            validation = ValidationResult(
                status=ValidationStatus.INVALID,
                confidence=param_validation.confidence,
                message=f"Class '{class_name}' found but has invalid constructor parameters: {param_validation.message}",
                suggestions=param_validation.suggestions
            )
        else:
            validation = ValidationResult(
                status=ValidationStatus.VALID,
                confidence=0.8,
                message=f"Class '{class_name}' found in knowledge graph"
            )

        return ClassValidation(
            class_instantiation=instantiation,
            validation=validation,
            parameter_validation=param_validation
        )
    async def _validate_method_calls(self, method_calls: List[MethodCall]) -> List[MethodValidation]:
        """Validate method calls"""
        validations = []

        for method_call in method_calls:
            validation = await self._validate_single_method_call(method_call)
            validations.append(validation)

        return validations

    async def _validate_single_method_call(self, method_call: MethodCall) -> MethodValidation:
        """Validate a single method call.

        Requires the analyzer to have inferred the receiver's type; calls on
        objects of unknown type, or on types outside the knowledge graph, are
        reported UNCERTAIN rather than invalid.
        """
        class_type = method_call.object_type

        if not class_type:
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.3,
                message=f"Cannot determine object type for '{method_call.object_name}'"
            )
            return MethodValidation(
                method_call=method_call,
                validation=validation
            )

        # Skip validation for classes not from knowledge graph
        if not self._is_from_knowledge_graph(class_type):
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.8,
                message=f"Skipping validation: '{class_type}' is not from knowledge graph"
            )
            return MethodValidation(
                method_call=method_call,
                validation=validation
            )

        # Find method in knowledge graph
        method_info = await self._find_method(class_type, method_call.method_name)

        if not method_info:
            # Check for similar method names
            similar_methods = await self._find_similar_methods(class_type, method_call.method_name)

            validation = ValidationResult(
                status=ValidationStatus.NOT_FOUND,
                confidence=0.1,
                message=f"Method '{method_call.method_name}' not found on class '{class_type}'",
                suggestions=similar_methods
            )
            return MethodValidation(
                method_call=method_call,
                validation=validation
            )

        # Validate parameters
        expected_params = method_info.get('params_list', [])
        param_validation = self._validate_parameters(
            expected_params=expected_params,
            provided_args=method_call.args,
            provided_kwargs=method_call.kwargs
        )

        # Use parameter validation result if it failed
        if param_validation.status == ValidationStatus.INVALID:
            validation = ValidationResult(
                status=ValidationStatus.INVALID,
                confidence=param_validation.confidence,
                message=f"Method '{method_call.method_name}' found but has invalid parameters: {param_validation.message}",
                suggestions=param_validation.suggestions
            )
        else:
            validation = ValidationResult(
                status=ValidationStatus.VALID,
                confidence=0.9,
                message=f"Method '{method_call.method_name}' found on class '{class_type}'"
            )

        return MethodValidation(
            method_call=method_call,
            validation=validation,
            expected_params=expected_params,
            actual_params=method_call.args + list(method_call.kwargs.keys()),
            parameter_validation=param_validation
        )

    async def _validate_attribute_accesses(self, attribute_accesses: List[AttributeAccess]) -> List[AttributeValidation]:
        """Validate attribute accesses"""
        validations = []

        for attr_access in attribute_accesses:
            validation = await self._validate_single_attribute_access(attr_access)
            validations.append(validation)

        return validations

    async def _validate_single_attribute_access(self, attr_access: AttributeAccess) -> AttributeValidation:
        """Validate a single attribute access.

        Falls back to a method lookup when no attribute matches, because
        decorator usage like @agent.tool is parsed as attribute access.
        """
        class_type = attr_access.object_type

        if not class_type:
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.3,
                message=f"Cannot determine object type for '{attr_access.object_name}'"
            )
            return AttributeValidation(
                attribute_access=attr_access,
                validation=validation
            )

        # Skip validation for classes not from knowledge graph
        if not self._is_from_knowledge_graph(class_type):
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.8,
                message=f"Skipping validation: '{class_type}' is not from knowledge graph"
            )
            return AttributeValidation(
                attribute_access=attr_access,
                validation=validation
            )

        # Find attribute in knowledge graph
        attr_info = await self._find_attribute(class_type, attr_access.attribute_name)

        if not attr_info:
            # If not found as attribute, check if it's a method (for decorators like @agent.tool)
            method_info = await self._find_method(class_type, attr_access.attribute_name)

            if method_info:
                validation = ValidationResult(
                    status=ValidationStatus.VALID,
                    confidence=0.8,
                    message=f"'{attr_access.attribute_name}' found as method on class '{class_type}' (likely used as decorator)"
                )
                return AttributeValidation(
                    attribute_access=attr_access,
                    validation=validation,
                    expected_type="method"
                )
            validation = ValidationResult(
                status=ValidationStatus.NOT_FOUND,
                confidence=0.2,
                message=f"'{attr_access.attribute_name}' not found on class '{class_type}'"
            )
            return AttributeValidation(
                attribute_access=attr_access,
                validation=validation
            )

        validation = ValidationResult(
            status=ValidationStatus.VALID,
            confidence=0.8,
            message=f"Attribute '{attr_access.attribute_name}' found on class '{class_type}'"
        )

        return AttributeValidation(
            attribute_access=attr_access,
            validation=validation,
            expected_type=attr_info.get('type')
        )

    async def _validate_function_calls(self, function_calls: List[FunctionCall]) -> List[FunctionValidation]:
        """Validate function calls"""
        validations = []

        for func_call in function_calls:
            validation = await self._validate_single_function_call(func_call)
            validations.append(validation)

        return validations

    async def _validate_single_function_call(self, func_call: FunctionCall) -> FunctionValidation:
        """Validate a single function call.

        NOTE(review): the knowledge-graph membership check only runs when
        full_name is set; bare calls fall straight through to the graph
        lookup — presumably intentional, as bare names cannot be attributed
        to a module. Confirm against the analyzer's output format.
        """
        func_name = func_call.full_name or func_call.function_name

        # Skip validation for functions not from knowledge graph
        if func_call.full_name and not self._is_from_knowledge_graph(func_call.full_name):
            validation = ValidationResult(
                status=ValidationStatus.UNCERTAIN,
                confidence=0.8,
                message=f"Skipping validation: '{func_name}' is not from knowledge graph"
            )
            return FunctionValidation(
                function_call=func_call,
                validation=validation
            )

        # Find function in knowledge graph
        func_info = await self._find_function(func_name)

        if not func_info:
            validation = ValidationResult(
                status=ValidationStatus.NOT_FOUND,
                confidence=0.2,
                message=f"Function '{func_name}' not found in knowledge graph"
            )
            return FunctionValidation(
                function_call=func_call,
                validation=validation
            )
self._validate_parameters( + expected_params=expected_params, + provided_args=func_call.args, + provided_kwargs=func_call.kwargs + ) + + # Use parameter validation result if it failed + if param_validation.status == ValidationStatus.INVALID: + validation = ValidationResult( + status=ValidationStatus.INVALID, + confidence=param_validation.confidence, + message=f"Function '{func_name}' found but has invalid parameters: {param_validation.message}", + suggestions=param_validation.suggestions + ) + else: + validation = ValidationResult( + status=ValidationStatus.VALID, + confidence=0.8, + message=f"Function '{func_name}' found in knowledge graph" + ) + + return FunctionValidation( + function_call=func_call, + validation=validation, + expected_params=expected_params, + actual_params=func_call.args + list(func_call.kwargs.keys()), + parameter_validation=param_validation + ) + + def _validate_parameters(self, expected_params: List[str], provided_args: List[str], + provided_kwargs: Dict[str, str]) -> ValidationResult: + """Validate function/method parameters with comprehensive support""" + if not expected_params: + return ValidationResult( + status=ValidationStatus.UNCERTAIN, + confidence=0.5, + message="Parameter information not available" + ) + + # Parse expected parameters - handle detailed format + required_positional = [] + optional_positional = [] + keyword_only_required = [] + keyword_only_optional = [] + has_varargs = False + has_varkwargs = False + + for param in expected_params: + # Handle detailed format: "[keyword_only] name:type=default" or "name:type" + param_clean = param.strip() + + # Check for parameter kind prefix + kind = 'positional' + if param_clean.startswith('['): + end_bracket = param_clean.find(']') + if end_bracket > 0: + kind = param_clean[1:end_bracket] + param_clean = param_clean[end_bracket+1:].strip() + + # Check for varargs/varkwargs + if param_clean.startswith('*') and not param_clean.startswith('**'): + has_varargs = True + continue + elif 
param_clean.startswith('**'): + has_varkwargs = True + continue + + # Parse name and check if optional + if ':' in param_clean: + param_name = param_clean.split(':')[0] + is_optional = '=' in param_clean + + if kind == 'keyword_only': + if is_optional: + keyword_only_optional.append(param_name) + else: + keyword_only_required.append(param_name) + else: # positional + if is_optional: + optional_positional.append(param_name) + else: + required_positional.append(param_name) + + # Count provided parameters + provided_positional_count = len(provided_args) + provided_keyword_names = set(provided_kwargs.keys()) + + # Validate positional arguments + min_required_positional = len(required_positional) + max_allowed_positional = len(required_positional) + len(optional_positional) + + if not has_varargs and provided_positional_count > max_allowed_positional: + return ValidationResult( + status=ValidationStatus.INVALID, + confidence=0.8, + message=f"Too many positional arguments: provided {provided_positional_count}, max allowed {max_allowed_positional}" + ) + + if provided_positional_count < min_required_positional: + return ValidationResult( + status=ValidationStatus.INVALID, + confidence=0.8, + message=f"Too few positional arguments: provided {provided_positional_count}, required {min_required_positional}" + ) + + # Validate keyword arguments + all_valid_kwarg_names = set(required_positional + optional_positional + keyword_only_required + keyword_only_optional) + invalid_kwargs = provided_keyword_names - all_valid_kwarg_names + + if invalid_kwargs and not has_varkwargs: + return ValidationResult( + status=ValidationStatus.INVALID, + confidence=0.7, + message=f"Invalid keyword arguments: {list(invalid_kwargs)}", + suggestions=[f"Valid parameters: {list(all_valid_kwarg_names)}"] + ) + + # Check required keyword-only arguments + missing_required_kwargs = set(keyword_only_required) - provided_keyword_names + if missing_required_kwargs: + return ValidationResult( + 
        return ValidationResult(
            status=ValidationStatus.VALID,
            confidence=0.9,
            message="Parameters are valid"
        )

    # Neo4j Query Methods

    async def _find_modules(self, module_name: str) -> List[str]:
        """Find repository matching the module name, then return its files.

        Resolution order: match by file module names first, then fall back to
        fuzzy repository-name matching (hyphen/underscore variants); files are
        returned from the single best-matching repository only.
        """
        async with self.driver.session() as session:
            # First, try to find files with module names that match or start with the search term
            module_query = """
            MATCH (r:Repository)-[:CONTAINS]->(f:File)
            WHERE f.module_name = $module_name
               OR f.module_name STARTS WITH $module_name + '.'
               OR split(f.module_name, '.')[0] = $module_name
            RETURN DISTINCT r.name as repo_name, count(f) as file_count
            ORDER BY file_count DESC
            LIMIT 5
            """

            result = await session.run(module_query, module_name=module_name)
            repos_from_modules = []
            async for record in result:
                repos_from_modules.append(record['repo_name'])

            # Also try repository name matching as fallback
            repo_query = """
            MATCH (r:Repository)
            WHERE toLower(r.name) = toLower($module_name)
               OR toLower(replace(r.name, '-', '_')) = toLower($module_name)
               OR toLower(replace(r.name, '_', '-')) = toLower($module_name)
            RETURN r.name as repo_name
            ORDER BY
                CASE
                    WHEN toLower(r.name) = toLower($module_name) THEN 1
                    WHEN toLower(replace(r.name, '-', '_')) = toLower($module_name) THEN 2
                    WHEN toLower(replace(r.name, '_', '-')) = toLower($module_name) THEN 3
                END
            LIMIT 5
            """

            result = await session.run(repo_query, module_name=module_name)
            repos_from_names = []
            async for record in result:
                repos_from_names.append(record['repo_name'])

            # Combine results, prioritizing module-based matches
            all_repos = repos_from_modules + [r for r in repos_from_names if r not in repos_from_modules]

            if not all_repos:
                return []
all_repos[0] + files_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File) + RETURN f.path, f.module_name + LIMIT 50 + """ + + result = await session.run(files_query, repo_name=best_repo) + files = [] + async for record in result: + files.append(record['f.path']) + + return files + + async def _get_module_contents(self, module_name: str) -> Tuple[List[str], List[str]]: + """Get classes and functions available in a repository matching the module name""" + async with self.driver.session() as session: + # First, try to find repository by module names in files + module_query = """ + MATCH (r:Repository)-[:CONTAINS]->(f:File) + WHERE f.module_name = $module_name + OR f.module_name STARTS WITH $module_name + '.' + OR split(f.module_name, '.')[0] = $module_name + RETURN DISTINCT r.name as repo_name, count(f) as file_count + ORDER BY file_count DESC + LIMIT 1 + """ + + result = await session.run(module_query, module_name=module_name) + record = await result.single() + + if record: + repo_name = record['repo_name'] + else: + # Fallback to repository name matching + repo_query = """ + MATCH (r:Repository) + WHERE toLower(r.name) = toLower($module_name) + OR toLower(replace(r.name, '-', '_')) = toLower($module_name) + OR toLower(replace(r.name, '_', '-')) = toLower($module_name) + RETURN r.name as repo_name + ORDER BY + CASE + WHEN toLower(r.name) = toLower($module_name) THEN 1 + WHEN toLower(replace(r.name, '-', '_')) = toLower($module_name) THEN 2 + WHEN toLower(replace(r.name, '_', '-')) = toLower($module_name) THEN 3 + END + LIMIT 1 + """ + + result = await session.run(repo_query, module_name=module_name) + record = await result.single() + + if not record: + return [], [] + + repo_name = record['repo_name'] + + # Get classes from this repository + class_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + RETURN DISTINCT c.name as class_name + """ + + result = await session.run(class_query, 
    async def _find_repository_for_module(self, module_name: str) -> Optional[str]:
        """Find the repository name that matches a module name.

        Results (including misses, stored as None) are memoized in
        self.repo_cache. Matching tries file module names first, then falls
        back to fuzzy repository-name matching including CONTAINS-style
        partial matches.
        """
        if module_name in self.repo_cache:
            return self.repo_cache[module_name]

        async with self.driver.session() as session:
            # First, try to find repository by module names in files
            module_query = """
            MATCH (r:Repository)-[:CONTAINS]->(f:File)
            WHERE f.module_name = $module_name
               OR f.module_name STARTS WITH $module_name + '.'
               OR split(f.module_name, '.')[0] = $module_name
            RETURN DISTINCT r.name as repo_name, count(f) as file_count
            ORDER BY file_count DESC
            LIMIT 1
            """

            result = await session.run(module_query, module_name=module_name)
            record = await result.single()

            if record:
                repo_name = record['repo_name']
            else:
                # Fallback to repository name matching
                query = """
                MATCH (r:Repository)
                WHERE toLower(r.name) = toLower($module_name)
                   OR toLower(replace(r.name, '-', '_')) = toLower($module_name)
                   OR toLower(replace(r.name, '_', '-')) = toLower($module_name)
                   OR toLower(r.name) CONTAINS toLower($module_name)
                   OR toLower($module_name) CONTAINS toLower(replace(r.name, '-', '_'))
                RETURN r.name as repo_name
                ORDER BY
                    CASE
                        WHEN toLower(r.name) = toLower($module_name) THEN 1
                        WHEN toLower(replace(r.name, '-', '_')) = toLower($module_name) THEN 2
                        ELSE 3
                    END
                LIMIT 1
                """

                result = await session.run(query, module_name=module_name)
                record = await result.single()
                repo_name = record['repo_name'] if record else None

        self.repo_cache[module_name] = repo_name
        return repo_name

    async def _find_class(self, class_name: str) -> Optional[Dict[str, Any]]:
        """Find class information in knowledge graph.

        Tries an exact name/full_name match first; for dotted names like
        "pydantic_ai.Agent" it falls back to resolving the module part to a
        repository and searching that repository's classes.
        """
        async with self.driver.session() as session:
            # First try exact match
            query = """
            MATCH (c:Class)
            WHERE c.name = $class_name OR c.full_name = $class_name
            RETURN c.name as name, c.full_name as full_name
            LIMIT 1
            """

            result = await session.run(query, class_name=class_name)
            record = await result.single()

            if record:
                return {
                    'name': record['name'],
                    'full_name': record['full_name']
                }

            # If no exact match and class_name has dots, try repository-based search
            if '.' in class_name:
                parts = class_name.split('.')
                module_part = '.'.join(parts[:-1])  # e.g., "pydantic_ai"
                class_part = parts[-1]  # e.g., "Agent"

                # Find repository for the module
                repo_name = await self._find_repository_for_module(module_part)

                if repo_name:
                    # Search for class within this repository
                    repo_query = """
                    MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)
                    WHERE c.name = $class_name
                    RETURN c.name as name, c.full_name as full_name
                    LIMIT 1
                    """

                    result = await session.run(repo_query, repo_name=repo_name, class_name=class_part)
                    record = await result.single()

                    if record:
                        return {
                            'name': record['name'],
                            'full_name': record['full_name']
                        }

            return None

    async def _find_method(self, class_name: str, method_name: str) -> Optional[Dict[str, Any]]:
        """Find method information for a class.

        Memoized per "class.method" key (misses cached as []). Prefers the
        detailed parameter spec (params_detailed) over the simple one.
        """
        cache_key = f"{class_name}.{method_name}"
        if cache_key in self.method_cache:
            methods = self.method_cache[cache_key]
            return methods[0] if methods else None

        async with self.driver.session() as session:
            # First try exact match
            query = """
            MATCH (c:Class)-[:HAS_METHOD]->(m:Method)
            WHERE (c.name = $class_name OR c.full_name = $class_name)
              AND m.name = $method_name
            RETURN m.name as name, m.params_list as params_list, m.params_detailed as params_detailed,
                   m.return_type as return_type, m.args as args
            LIMIT 1
            """

            result = await session.run(query, class_name=class_name, method_name=method_name)
            record = await result.single()

            if record:
                # Use detailed params if available, fall back to simple params
                params_to_use = record['params_detailed'] or record['params_list'] or []

                method_info = {
                    'name': record['name'],
                    'params_list': params_to_use,
                    'return_type': record['return_type'],
                    'args': record['args'] or []
                }
                self.method_cache[cache_key] = [method_info]
                return method_info

            # If no exact match and class_name has dots, try repository-based search
            if '.' in class_name:
                parts = class_name.split('.')
                module_part = '.'.join(parts[:-1])  # e.g., "pydantic_ai"
                class_part = parts[-1]  # e.g., "Agent"

                # Find repository for the module
                repo_name = await self._find_repository_for_module(module_part)

                if repo_name:
                    # Search for method within this repository's classes
                    repo_query = """
                    MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_METHOD]->(m:Method)
                    WHERE c.name = $class_name AND m.name = $method_name
                    RETURN m.name as name, m.params_list as params_list, m.params_detailed as params_detailed,
                           m.return_type as return_type, m.args as args
                    LIMIT 1
                    """

                    result = await session.run(repo_query, repo_name=repo_name, class_name=class_part, method_name=method_name)
                    record = await result.single()

                    if record:
                        # Use detailed params if available, fall back to simple params
                        params_to_use = record['params_detailed'] or record['params_list'] or []

                        method_info = {
                            'name': record['name'],
                            'params_list': params_to_use,
                            'return_type': record['return_type'],
                            'args': record['args'] or []
                        }
                        self.method_cache[cache_key] = [method_info]
                        return method_info

            # Cache the miss so repeated lookups skip the query
            self.method_cache[cache_key] = []
            return None

    async def _find_attribute(self, class_name: str, attr_name: str) -> Optional[Dict[str, Any]]:
        """Find attribute information for a class.

        Same two-step strategy as _find_class: exact name/full_name match,
        then repository-scoped search for dotted class names. Not cached.
        """
        async with self.driver.session() as session:
            # First try exact match
            query = """
            MATCH (c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute)
            WHERE (c.name = $class_name OR c.full_name = $class_name)
              AND a.name = $attr_name
            RETURN a.name as name, a.type as type
            LIMIT 1
            """

            result = await session.run(query, class_name=class_name, attr_name=attr_name)
            record = await result.single()

            if record:
                return {
                    'name': record['name'],
                    'type': record['type']
                }

            # If no exact match and class_name has dots, try repository-based search
            if '.' in class_name:
                parts = class_name.split('.')
                module_part = '.'.join(parts[:-1])  # e.g., "pydantic_ai"
                class_part = parts[-1]  # e.g., "Agent"

                # Find repository for the module
                repo_name = await self._find_repository_for_module(module_part)

                if repo_name:
                    # Search for attribute within this repository's classes
                    repo_query = """
                    MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute)
                    WHERE c.name = $class_name AND a.name = $attr_name
                    RETURN a.name as name, a.type as type
                    LIMIT 1
                    """

                    result = await session.run(repo_query, repo_name=repo_name, class_name=class_part, attr_name=attr_name)
                    record = await result.single()

                    if record:
                        return {
                            'name': record['name'],
                            'type': record['type']
                        }

            return None
    async def _find_function(self, func_name: str) -> Optional[Dict[str, Any]]:
        """Find function information.

        Exact name/full_name match first; for dotted names the module part is
        resolved to a repository and the bare function name is searched there.
        Prefers params_detailed over params_list. Not cached.
        """
        async with self.driver.session() as session:
            # First try exact match
            query = """
            MATCH (f:Function)
            WHERE f.name = $func_name OR f.full_name = $func_name
            RETURN f.name as name, f.params_list as params_list, f.params_detailed as params_detailed,
                   f.return_type as return_type, f.args as args
            LIMIT 1
            """

            result = await session.run(query, func_name=func_name)
            record = await result.single()

            if record:
                # Use detailed params if available, fall back to simple params
                params_to_use = record['params_detailed'] or record['params_list'] or []

                return {
                    'name': record['name'],
                    'params_list': params_to_use,
                    'return_type': record['return_type'],
                    'args': record['args'] or []
                }

            # If no exact match and func_name has dots, try repository-based search
            if '.' in func_name:
                parts = func_name.split('.')
                module_part = '.'.join(parts[:-1])  # e.g., "pydantic_ai"
                func_part = parts[-1]  # e.g., "some_function"

                # Find repository for the module
                repo_name = await self._find_repository_for_module(module_part)

                if repo_name:
                    # Search for function within this repository
                    repo_query = """
                    MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(func:Function)
                    WHERE func.name = $func_name
                    RETURN func.name as name, func.params_list as params_list, func.params_detailed as params_detailed,
                           func.return_type as return_type, func.args as args
                    LIMIT 1
                    """

                    result = await session.run(repo_query, repo_name=repo_name, func_name=func_part)
                    record = await result.single()

                    if record:
                        # Use detailed params if available, fall back to simple params
                        params_to_use = record['params_detailed'] or record['params_list'] or []

                        return {
                            'name': record['name'],
                            'params_list': params_to_use,
                            'return_type': record['return_type'],
                            'args': record['args'] or []
                        }

            return None

    async def _find_pydantic_ai_result_method(self, method_name: str) -> Optional[Dict[str, Any]]:
        """Find method information for pydantic_ai result objects.

        NOTE(review): hard-codes the repository name "pydantic_ai" and
        heuristic class-name filters ('Result'/'Stream'/'Run'); this helper
        is not called anywhere in this chunk — confirm it is still needed.
        """
        # Look for methods on pydantic_ai classes that could be result objects
        async with self.driver.session() as session:
            # Search for common result methods in pydantic_ai repository
            query = """
            MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_METHOD]->(m:Method)
            WHERE m.name = $method_name
              AND (c.name CONTAINS 'Result' OR c.name CONTAINS 'Stream' OR c.name CONTAINS 'Run')
            RETURN m.name as name, m.params_list as params_list, m.params_detailed as params_detailed,
                   m.return_type as return_type, m.args as args, c.name as class_name
            LIMIT 1
            """

            result = await session.run(query, repo_name="pydantic_ai", method_name=method_name)
            record = await result.single()

            if record:
                # Use detailed params if available, fall back to simple params
                params_to_use = record['params_detailed'] or record['params_list'] or []

                return {
                    'name': record['name'],
                    'params_list': params_to_use,
                    'return_type': record['return_type'],
                    'args': record['args'] or [],
                    'source_class': record['class_name']
                }

            return None

    async def _find_similar_modules(self, module_name: str) -> List[str]:
        """Find similar repository names for suggestions.

        Matches on the first three characters of the requested module name
        (hyphen/underscore variants included), capped at five suggestions.
        """
        async with self.driver.session() as session:
            query = """
            MATCH (r:Repository)
            WHERE toLower(r.name) CONTAINS toLower($partial_name)
               OR toLower(replace(r.name, '-', '_')) CONTAINS toLower($partial_name)
               OR toLower(replace(r.name, '_', '-')) CONTAINS toLower($partial_name)
            RETURN r.name
            LIMIT 5
            """

            result = await session.run(query, partial_name=module_name[:3])
            suggestions = []
            async for record in result:
                suggestions.append(record['name'])

            return suggestions

    async def _find_similar_methods(self, class_name: str, method_name: str) -> List[str]:
        """Find similar method names for suggestions.

        Matches methods whose names contain the first three characters of the
        missing method, on the exact class first, then (for dotted names)
        repository-scoped. Capped at five suggestions.
        """
        async with self.driver.session() as session:
            # First try exact class match
            query = """
            MATCH (c:Class)-[:HAS_METHOD]->(m:Method)
            WHERE (c.name = $class_name OR c.full_name = $class_name)
              AND m.name CONTAINS $partial_name
            RETURN m.name as name
            LIMIT 5
            """

            result = await session.run(query, class_name=class_name, partial_name=method_name[:3])
            suggestions = []
            async for record in result:
                suggestions.append(record['name'])
            # If no suggestions and class_name has dots, try repository-based search
            if not suggestions and '.' in class_name:
                parts = class_name.split('.')
                module_part = '.'.join(parts[:-1])  # e.g., "pydantic_ai"
                class_part = parts[-1]  # e.g., "Agent"

                # Find repository for the module
                repo_name = await self._find_repository_for_module(module_part)

                if repo_name:
                    repo_query = """
                    MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_METHOD]->(m:Method)
                    WHERE c.name = $class_name AND m.name CONTAINS $partial_name
                    RETURN m.name as name
                    LIMIT 5
                    """

                    result = await session.run(repo_query, repo_name=repo_name, class_name=class_part, partial_name=method_name[:3])
                    async for record in result:
                        suggestions.append(record['name'])

            return suggestions

    def _calculate_overall_confidence(self, result: ScriptValidationResult) -> float:
        """Calculate overall confidence score for the validation (knowledge graph items only).

        External-library usages are deliberately excluded so they cannot drag
        the score down; the result is the mean confidence over in-graph items.
        """
        kg_validations = []

        # Only count validations from knowledge graph imports
        for val in result.import_validations:
            if val.validation.details.get('in_knowledge_graph', False):
                kg_validations.append(val.validation.confidence)

        # Only count validations from knowledge graph classes
        for val in result.class_validations:
            class_name = val.class_instantiation.full_class_name or val.class_instantiation.class_name
            if self._is_from_knowledge_graph(class_name):
                kg_validations.append(val.validation.confidence)

        # Only count validations from knowledge graph methods
        for val in result.method_validations:
            if val.method_call.object_type and self._is_from_knowledge_graph(val.method_call.object_type):
                kg_validations.append(val.validation.confidence)

        # Only count validations from knowledge graph attributes
        for val in result.attribute_validations:
            if val.attribute_access.object_type and self._is_from_knowledge_graph(val.attribute_access.object_type):
                kg_validations.append(val.validation.confidence)
from knowledge graph functions + for val in result.function_validations: + if val.function_call.full_name and self._is_from_knowledge_graph(val.function_call.full_name): + kg_validations.append(val.validation.confidence) + + if not kg_validations: + return 1.0 # No knowledge graph items to validate = perfect confidence + + return sum(kg_validations) / len(kg_validations) + + def _is_from_knowledge_graph(self, class_type: str) -> bool: + """Check if a class type comes from a module in the knowledge graph""" + if not class_type: + return False + + # For dotted names like "pydantic_ai.Agent" or "pydantic_ai.StreamedRunResult", check the base module + if '.' in class_type: + base_module = class_type.split('.')[0] + # Exact match only - "pydantic" should not match "pydantic_ai" + return base_module in self.knowledge_graph_modules + + # For simple names, check if any knowledge graph module matches exactly + # Don't use substring matching to avoid "pydantic" matching "pydantic_ai" + return class_type in self.knowledge_graph_modules + + def _detect_hallucinations(self, result: ScriptValidationResult) -> List[Dict[str, Any]]: + """Detect and categorize hallucinations""" + hallucinations = [] + reported_items = set() # Track reported items to avoid duplicates + + # Check method calls (only for knowledge graph classes) + for val in result.method_validations: + if (val.validation.status == ValidationStatus.NOT_FOUND and + val.method_call.object_type and + self._is_from_knowledge_graph(val.method_call.object_type)): + + # Create unique key to avoid duplicates + key = (val.method_call.line_number, val.method_call.method_name, val.method_call.object_type) + if key not in reported_items: + reported_items.add(key) + hallucinations.append({ + 'type': 'METHOD_NOT_FOUND', + 'location': f"line {val.method_call.line_number}", + 'description': f"Method '{val.method_call.method_name}' not found on class '{val.method_call.object_type}'", + 'suggestion': val.validation.suggestions[0] if 
val.validation.suggestions else None + }) + + # Check attributes (only for knowledge graph classes) - but skip if already reported as method + for val in result.attribute_validations: + if (val.validation.status == ValidationStatus.NOT_FOUND and + val.attribute_access.object_type and + self._is_from_knowledge_graph(val.attribute_access.object_type)): + + # Create unique key - if this was already reported as a method, skip it + key = (val.attribute_access.line_number, val.attribute_access.attribute_name, val.attribute_access.object_type) + if key not in reported_items: + reported_items.add(key) + hallucinations.append({ + 'type': 'ATTRIBUTE_NOT_FOUND', + 'location': f"line {val.attribute_access.line_number}", + 'description': f"Attribute '{val.attribute_access.attribute_name}' not found on class '{val.attribute_access.object_type}'" + }) + + # Check parameter issues (only for knowledge graph methods) + for val in result.method_validations: + if (val.parameter_validation and + val.parameter_validation.status == ValidationStatus.INVALID and + val.method_call.object_type and + self._is_from_knowledge_graph(val.method_call.object_type)): + hallucinations.append({ + 'type': 'INVALID_PARAMETERS', + 'location': f"line {val.method_call.line_number}", + 'description': f"Invalid parameters for method '{val.method_call.method_name}': {val.parameter_validation.message}" + }) + + return hallucinations \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/knowledge_graphs/parse_repo_into_neo4j.py b/servers/mcp-crawl4ai-rag/knowledge_graphs/parse_repo_into_neo4j.py new file mode 100644 index 0000000..e4eaa58 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/knowledge_graphs/parse_repo_into_neo4j.py @@ -0,0 +1,858 @@ +""" +Direct Neo4j GitHub Code Repository Extractor + +Creates nodes and relationships directly in Neo4j without Graphiti: +- File nodes +- Class nodes +- Method nodes +- Function nodes +- Import relationships + +Bypasses all LLM processing for maximum speed. 
+""" + +import asyncio +import logging +import os +import subprocess +import shutil +from datetime import datetime, timezone +from pathlib import Path +from typing import List, Optional, Dict, Any, Set +import ast + +from dotenv import load_dotenv +from neo4j import AsyncGraphDatabase + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', +) +logger = logging.getLogger(__name__) + + +class Neo4jCodeAnalyzer: + """Analyzes code for direct Neo4j insertion""" + + def __init__(self): + # External modules to ignore + self.external_modules = { + # Python standard library + 'os', 'sys', 'json', 'logging', 'datetime', 'pathlib', 'typing', 'collections', + 'asyncio', 'subprocess', 'ast', 're', 'string', 'urllib', 'http', 'email', + 'time', 'uuid', 'hashlib', 'base64', 'itertools', 'functools', 'operator', + 'contextlib', 'copy', 'pickle', 'tempfile', 'shutil', 'glob', 'fnmatch', + 'io', 'codecs', 'locale', 'platform', 'socket', 'ssl', 'threading', 'queue', + 'multiprocessing', 'concurrent', 'warnings', 'traceback', 'inspect', + 'importlib', 'pkgutil', 'types', 'weakref', 'gc', 'dataclasses', 'enum', + 'abc', 'numbers', 'decimal', 'fractions', 'math', 'cmath', 'random', 'statistics', + + # Common third-party libraries + 'requests', 'urllib3', 'httpx', 'aiohttp', 'flask', 'django', 'fastapi', + 'pydantic', 'sqlalchemy', 'alembic', 'psycopg2', 'pymongo', 'redis', + 'celery', 'pytest', 'unittest', 'mock', 'faker', 'factory', 'hypothesis', + 'numpy', 'pandas', 'matplotlib', 'seaborn', 'scipy', 'sklearn', 'torch', + 'tensorflow', 'keras', 'opencv', 'pillow', 'boto3', 'botocore', 'azure', + 'google', 'openai', 'anthropic', 'langchain', 'transformers', 'huggingface_hub', + 'click', 'typer', 'rich', 'colorama', 'tqdm', 'python-dotenv', 'pyyaml', + 'toml', 'configargparse', 'marshmallow', 'attrs', 'dataclasses-json', + 'jsonschema', 'cerberus', 'voluptuous', 'schema', 'jinja2', 
'mako', + 'cryptography', 'bcrypt', 'passlib', 'jwt', 'authlib', 'oauthlib' + } + + def analyze_python_file(self, file_path: Path, repo_root: Path, project_modules: Set[str]) -> Dict[str, Any]: + """Extract structure for direct Neo4j insertion""" + try: + with open(file_path, 'r', encoding='utf-8') as f: + content = f.read() + + tree = ast.parse(content) + relative_path = str(file_path.relative_to(repo_root)) + module_name = self._get_importable_module_name(file_path, repo_root, relative_path) + + # Extract structure + classes = [] + functions = [] + imports = [] + + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + # Extract class with its methods and attributes + methods = [] + attributes = [] + + for item in node.body: + if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)): + if not item.name.startswith('_'): # Public methods only + # Extract comprehensive parameter info + params = self._extract_function_parameters(item) + + # Get return type annotation + return_type = self._get_name(item.returns) if item.returns else 'Any' + + # Create detailed parameter list for Neo4j storage + params_detailed = [] + for p in params: + param_str = f"{p['name']}:{p['type']}" + if p['optional'] and p['default'] is not None: + param_str += f"={p['default']}" + elif p['optional']: + param_str += "=None" + if p['kind'] != 'positional': + param_str = f"[{p['kind']}] {param_str}" + params_detailed.append(param_str) + + methods.append({ + 'name': item.name, + 'params': params, # Full parameter objects + 'params_detailed': params_detailed, # Detailed string format + 'return_type': return_type, + 'args': [arg.arg for arg in item.args.args if arg.arg != 'self'] # Keep for backwards compatibility + }) + elif isinstance(item, ast.AnnAssign) and isinstance(item.target, ast.Name): + # Type annotated attributes + if not item.target.id.startswith('_'): + attributes.append({ + 'name': item.target.id, + 'type': self._get_name(item.annotation) if item.annotation else 
'Any' + }) + + classes.append({ + 'name': node.name, + 'full_name': f"{module_name}.{node.name}", + 'methods': methods, + 'attributes': attributes + }) + + elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + # Only top-level functions + if not any(node in cls_node.body for cls_node in ast.walk(tree) if isinstance(cls_node, ast.ClassDef)): + if not node.name.startswith('_'): + # Extract comprehensive parameter info + params = self._extract_function_parameters(node) + + # Get return type annotation + return_type = self._get_name(node.returns) if node.returns else 'Any' + + # Create detailed parameter list for Neo4j storage + params_detailed = [] + for p in params: + param_str = f"{p['name']}:{p['type']}" + if p['optional'] and p['default'] is not None: + param_str += f"={p['default']}" + elif p['optional']: + param_str += "=None" + if p['kind'] != 'positional': + param_str = f"[{p['kind']}] {param_str}" + params_detailed.append(param_str) + + # Simple format for backwards compatibility + params_list = [f"{p['name']}:{p['type']}" for p in params] + + functions.append({ + 'name': node.name, + 'full_name': f"{module_name}.{node.name}", + 'params': params, # Full parameter objects + 'params_detailed': params_detailed, # Detailed string format + 'params_list': params_list, # Simple string format for backwards compatibility + 'return_type': return_type, + 'args': [arg.arg for arg in node.args.args] # Keep for backwards compatibility + }) + + elif isinstance(node, (ast.Import, ast.ImportFrom)): + # Track internal imports only + if isinstance(node, ast.Import): + for alias in node.names: + if self._is_likely_internal(alias.name, project_modules): + imports.append(alias.name) + elif isinstance(node, ast.ImportFrom) and node.module: + if (node.module.startswith('.') or self._is_likely_internal(node.module, project_modules)): + imports.append(node.module) + + return { + 'module_name': module_name, + 'file_path': relative_path, + 'classes': classes, + 'functions': 
functions, + 'imports': list(set(imports)), # Remove duplicates + 'line_count': len(content.splitlines()) + } + + except Exception as e: + logger.warning(f"Could not analyze {file_path}: {e}") + return None + + def _is_likely_internal(self, import_name: str, project_modules: Set[str]) -> bool: + """Check if an import is likely internal to the project""" + if not import_name: + return False + + # Relative imports are definitely internal + if import_name.startswith('.'): + return True + + # Check if it's a known external module + base_module = import_name.split('.')[0] + if base_module in self.external_modules: + return False + + # Check if it matches any project module + for project_module in project_modules: + if import_name.startswith(project_module): + return True + + # If it's not obviously external, consider it internal + if (not any(ext in base_module.lower() for ext in ['test', 'mock', 'fake']) and + not base_module.startswith('_') and + len(base_module) > 2): + return True + + return False + + def _get_importable_module_name(self, file_path: Path, repo_root: Path, relative_path: str) -> str: + """Determine the actual importable module name for a Python file""" + # Start with the default: convert file path to module path + default_module = relative_path.replace('/', '.').replace('\\', '.').replace('.py', '') + + # Common patterns to detect the actual package root + path_parts = Path(relative_path).parts + + # Look for common package indicators + package_roots = [] + + # Check each directory level for __init__.py to find package boundaries + current_path = repo_root + for i, part in enumerate(path_parts[:-1]): # Exclude the .py file itself + current_path = current_path / part + if (current_path / '__init__.py').exists(): + # This is a package directory, mark it as a potential root + package_roots.append(i) + + if package_roots: + # Use the first (outermost) package as the root + package_start = package_roots[0] + module_parts = path_parts[package_start:] + 
module_name = '.'.join(module_parts).replace('.py', '') + return module_name + + # Fallback: look for common Python project structures + # Skip common non-package directories + skip_dirs = {'src', 'lib', 'source', 'python', 'pkg', 'packages'} + + # Find the first directory that's not in skip_dirs + filtered_parts = [] + for part in path_parts: + if part.lower() not in skip_dirs or filtered_parts: # Once we start including, include everything + filtered_parts.append(part) + + if filtered_parts: + module_name = '.'.join(filtered_parts).replace('.py', '') + return module_name + + # Final fallback: use the default + return default_module + + def _extract_function_parameters(self, func_node): + """Comprehensive parameter extraction from function definition""" + params = [] + + # Regular positional arguments + for i, arg in enumerate(func_node.args.args): + if arg.arg == 'self': + continue + + param_info = { + 'name': arg.arg, + 'type': self._get_name(arg.annotation) if arg.annotation else 'Any', + 'kind': 'positional', + 'optional': False, + 'default': None + } + + # Check if this argument has a default value + defaults_start = len(func_node.args.args) - len(func_node.args.defaults) + if i >= defaults_start: + default_idx = i - defaults_start + if default_idx < len(func_node.args.defaults): + param_info['optional'] = True + param_info['default'] = self._get_default_value(func_node.args.defaults[default_idx]) + + params.append(param_info) + + # *args parameter + if func_node.args.vararg: + params.append({ + 'name': f"*{func_node.args.vararg.arg}", + 'type': self._get_name(func_node.args.vararg.annotation) if func_node.args.vararg.annotation else 'Any', + 'kind': 'var_positional', + 'optional': True, + 'default': None + }) + + # Keyword-only arguments (after *) + for i, arg in enumerate(func_node.args.kwonlyargs): + param_info = { + 'name': arg.arg, + 'type': self._get_name(arg.annotation) if arg.annotation else 'Any', + 'kind': 'keyword_only', + 'optional': True, # All 
kwonly args are optional unless explicitly required + 'default': None + } + + # Check for default value + if i < len(func_node.args.kw_defaults) and func_node.args.kw_defaults[i] is not None: + param_info['default'] = self._get_default_value(func_node.args.kw_defaults[i]) + else: + param_info['optional'] = False # No default = required kwonly arg + + params.append(param_info) + + # **kwargs parameter + if func_node.args.kwarg: + params.append({ + 'name': f"**{func_node.args.kwarg.arg}", + 'type': self._get_name(func_node.args.kwarg.annotation) if func_node.args.kwarg.annotation else 'Dict[str, Any]', + 'kind': 'var_keyword', + 'optional': True, + 'default': None + }) + + return params + + def _get_default_value(self, default_node): + """Extract default value from AST node""" + try: + if isinstance(default_node, ast.Constant): + return repr(default_node.value) + elif isinstance(default_node, ast.Name): + return default_node.id + elif isinstance(default_node, ast.Attribute): + return self._get_name(default_node) + elif isinstance(default_node, ast.List): + return "[]" + elif isinstance(default_node, ast.Dict): + return "{}" + else: + return "..." + except Exception: + return "..." + + def _get_name(self, node): + """Extract name from AST node, handling complex types safely""" + if node is None: + return "Any" + + try: + if isinstance(node, ast.Name): + return node.id + elif isinstance(node, ast.Attribute): + if hasattr(node, 'value'): + return f"{self._get_name(node.value)}.{node.attr}" + else: + return node.attr + elif isinstance(node, ast.Subscript): + # Handle List[Type], Dict[K,V], etc. 
+ base = self._get_name(node.value) + if hasattr(node, 'slice'): + if isinstance(node.slice, ast.Name): + return f"{base}[{node.slice.id}]" + elif isinstance(node.slice, ast.Tuple): + elts = [self._get_name(elt) for elt in node.slice.elts] + return f"{base}[{', '.join(elts)}]" + elif isinstance(node.slice, ast.Constant): + return f"{base}[{repr(node.slice.value)}]" + elif isinstance(node.slice, ast.Attribute): + return f"{base}[{self._get_name(node.slice)}]" + elif isinstance(node.slice, ast.Subscript): + return f"{base}[{self._get_name(node.slice)}]" + else: + # Try to get the name of the slice, fallback to Any if it fails + try: + slice_name = self._get_name(node.slice) + return f"{base}[{slice_name}]" + except: + return f"{base}[Any]" + return base + elif isinstance(node, ast.Constant): + return str(node.value) + elif isinstance(node, ast.Str): # Python < 3.8 + return f'"{node.s}"' + elif isinstance(node, ast.Tuple): + elts = [self._get_name(elt) for elt in node.elts] + return f"({', '.join(elts)})" + elif isinstance(node, ast.List): + elts = [self._get_name(elt) for elt in node.elts] + return f"[{', '.join(elts)}]" + else: + # Fallback for complex types - return a simple string representation + return "Any" + except Exception: + # If anything goes wrong, return a safe default + return "Any" + + +class DirectNeo4jExtractor: + """Creates nodes and relationships directly in Neo4j""" + + def __init__(self, neo4j_uri: str, neo4j_user: str, neo4j_password: str): + self.neo4j_uri = neo4j_uri + self.neo4j_user = neo4j_user + self.neo4j_password = neo4j_password + self.driver = None + self.analyzer = Neo4jCodeAnalyzer() + + async def initialize(self): + """Initialize Neo4j connection""" + logger.info("Initializing Neo4j connection...") + self.driver = AsyncGraphDatabase.driver( + self.neo4j_uri, + auth=(self.neo4j_user, self.neo4j_password) + ) + + # Clear existing data + # logger.info("Clearing existing data...") + # async with self.driver.session() as session: + # 
await session.run("MATCH (n) DETACH DELETE n") + + # Create constraints and indexes + logger.info("Creating constraints and indexes...") + async with self.driver.session() as session: + # Create constraints - using MERGE-friendly approach + await session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (f:File) REQUIRE f.path IS UNIQUE") + await session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (c:Class) REQUIRE c.full_name IS UNIQUE") + # Remove unique constraints for methods/attributes since they can be duplicated across classes + # await session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (m:Method) REQUIRE m.full_name IS UNIQUE") + # await session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (f:Function) REQUIRE f.full_name IS UNIQUE") + # await session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (a:Attribute) REQUIRE a.full_name IS UNIQUE") + + # Create indexes for performance + await session.run("CREATE INDEX IF NOT EXISTS FOR (f:File) ON (f.name)") + await session.run("CREATE INDEX IF NOT EXISTS FOR (c:Class) ON (c.name)") + await session.run("CREATE INDEX IF NOT EXISTS FOR (m:Method) ON (m.name)") + + logger.info("Neo4j initialized successfully") + + async def clear_repository_data(self, repo_name: str): + """Clear all data for a specific repository""" + logger.info(f"Clearing existing data for repository: {repo_name}") + async with self.driver.session() as session: + # Delete in specific order to avoid constraint issues + + # 1. Delete methods and attributes (they depend on classes) + await session.run(""" + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_METHOD]->(m:Method) + DETACH DELETE m + """, repo_name=repo_name) + + await session.run(""" + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute) + DETACH DELETE a + """, repo_name=repo_name) + + # 2. 
Delete functions (they depend on files) + await session.run(""" + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(func:Function) + DETACH DELETE func + """, repo_name=repo_name) + + # 3. Delete classes (they depend on files) + await session.run(""" + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + DETACH DELETE c + """, repo_name=repo_name) + + # 4. Delete files (they depend on repository) + await session.run(""" + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File) + DETACH DELETE f + """, repo_name=repo_name) + + # 5. Finally delete the repository + await session.run(""" + MATCH (r:Repository {name: $repo_name}) + DETACH DELETE r + """, repo_name=repo_name) + + logger.info(f"Cleared data for repository: {repo_name}") + + async def close(self): + """Close Neo4j connection""" + if self.driver: + await self.driver.close() + + def clone_repo(self, repo_url: str, target_dir: str) -> str: + """Clone repository with shallow clone""" + logger.info(f"Cloning repository to: {target_dir}") + if os.path.exists(target_dir): + logger.info(f"Removing existing directory: {target_dir}") + try: + def handle_remove_readonly(func, path, exc): + try: + if os.path.exists(path): + os.chmod(path, 0o777) + func(path) + except PermissionError: + logger.warning(f"Could not remove {path} - file in use, skipping") + pass + shutil.rmtree(target_dir, onerror=handle_remove_readonly) + except Exception as e: + logger.warning(f"Could not fully remove {target_dir}: {e}. 
Proceeding anyway...") + + logger.info(f"Running git clone from {repo_url}") + subprocess.run(['git', 'clone', '--depth', '1', repo_url, target_dir], check=True) + logger.info("Repository cloned successfully") + return target_dir + + def get_python_files(self, repo_path: str) -> List[Path]: + """Get Python files, focusing on main source directories""" + python_files = [] + exclude_dirs = { + 'tests', 'test', '__pycache__', '.git', 'venv', 'env', + 'node_modules', 'build', 'dist', '.pytest_cache', 'docs', + 'examples', 'example', 'demo', 'benchmark' + } + + for root, dirs, files in os.walk(repo_path): + dirs[:] = [d for d in dirs if d not in exclude_dirs and not d.startswith('.')] + + for file in files: + if file.endswith('.py') and not file.startswith('test_'): + file_path = Path(root) / file + if (file_path.stat().st_size < 500_000 and + file not in ['setup.py', 'conftest.py']): + python_files.append(file_path) + + return python_files + + async def analyze_repository(self, repo_url: str, temp_dir: str = None): + """Analyze repository and create nodes/relationships in Neo4j""" + repo_name = repo_url.split('/')[-1].replace('.git', '') + logger.info(f"Analyzing repository: {repo_name}") + + # Clear existing data for this repository before re-processing + await self.clear_repository_data(repo_name) + + # Set default temp_dir to repos folder at script level + if temp_dir is None: + script_dir = Path(__file__).parent + temp_dir = str(script_dir / "repos" / repo_name) + + # Clone and analyze + repo_path = Path(self.clone_repo(repo_url, temp_dir)) + + try: + logger.info("Getting Python files...") + python_files = self.get_python_files(str(repo_path)) + logger.info(f"Found {len(python_files)} Python files to analyze") + + # First pass: identify project modules + logger.info("Identifying project modules...") + project_modules = set() + for file_path in python_files: + relative_path = str(file_path.relative_to(repo_path)) + module_parts = relative_path.replace('/', 
'.').replace('.py', '').split('.') + if len(module_parts) > 0 and not module_parts[0].startswith('.'): + project_modules.add(module_parts[0]) + + logger.info(f"Identified project modules: {sorted(project_modules)}") + + # Second pass: analyze files and collect data + logger.info("Analyzing Python files...") + modules_data = [] + for i, file_path in enumerate(python_files): + if i % 20 == 0: + logger.info(f"Analyzing file {i+1}/{len(python_files)}: {file_path.name}") + + analysis = self.analyzer.analyze_python_file(file_path, repo_path, project_modules) + if analysis: + modules_data.append(analysis) + + logger.info(f"Found {len(modules_data)} files with content") + + # Create nodes and relationships in Neo4j + logger.info("Creating nodes and relationships in Neo4j...") + await self._create_graph(repo_name, modules_data) + + # Print summary + total_classes = sum(len(mod['classes']) for mod in modules_data) + total_methods = sum(len(cls['methods']) for mod in modules_data for cls in mod['classes']) + total_functions = sum(len(mod['functions']) for mod in modules_data) + total_imports = sum(len(mod['imports']) for mod in modules_data) + + print(f"\\n=== Direct Neo4j Repository Analysis for {repo_name} ===") + print(f"Files processed: {len(modules_data)}") + print(f"Classes created: {total_classes}") + print(f"Methods created: {total_methods}") + print(f"Functions created: {total_functions}") + print(f"Import relationships: {total_imports}") + + logger.info(f"Successfully created Neo4j graph for {repo_name}") + + finally: + if os.path.exists(temp_dir): + logger.info(f"Cleaning up temporary directory: {temp_dir}") + try: + def handle_remove_readonly(func, path, exc): + try: + if os.path.exists(path): + os.chmod(path, 0o777) + func(path) + except PermissionError: + logger.warning(f"Could not remove {path} - file in use, skipping") + pass + + shutil.rmtree(temp_dir, onerror=handle_remove_readonly) + logger.info("Cleanup completed") + except Exception as e: + 
logger.warning(f"Cleanup failed: {e}. Directory may remain at {temp_dir}") + # Don't fail the whole process due to cleanup issues + + async def _create_graph(self, repo_name: str, modules_data: List[Dict]): + """Create all nodes and relationships in Neo4j""" + + async with self.driver.session() as session: + # Create Repository node + await session.run( + "CREATE (r:Repository {name: $repo_name, created_at: datetime()})", + repo_name=repo_name + ) + + nodes_created = 0 + relationships_created = 0 + + for i, mod in enumerate(modules_data): + # 1. Create File node + await session.run(""" + CREATE (f:File { + name: $name, + path: $path, + module_name: $module_name, + line_count: $line_count, + created_at: datetime() + }) + """, + name=mod['file_path'].split('/')[-1], + path=mod['file_path'], + module_name=mod['module_name'], + line_count=mod['line_count'] + ) + nodes_created += 1 + + # 2. Connect File to Repository + await session.run(""" + MATCH (r:Repository {name: $repo_name}) + MATCH (f:File {path: $file_path}) + CREATE (r)-[:CONTAINS]->(f) + """, repo_name=repo_name, file_path=mod['file_path']) + relationships_created += 1 + + # 3. Create Class nodes and relationships + for cls in mod['classes']: + # Create Class node using MERGE to avoid duplicates + await session.run(""" + MERGE (c:Class {full_name: $full_name}) + ON CREATE SET c.name = $name, c.created_at = datetime() + """, name=cls['name'], full_name=cls['full_name']) + nodes_created += 1 + + # Connect File to Class + await session.run(""" + MATCH (f:File {path: $file_path}) + MATCH (c:Class {full_name: $class_full_name}) + MERGE (f)-[:DEFINES]->(c) + """, file_path=mod['file_path'], class_full_name=cls['full_name']) + relationships_created += 1 + + # 4. 
Create Method nodes - use MERGE to avoid duplicates + for method in cls['methods']: + method_full_name = f"{cls['full_name']}.{method['name']}" + # Create method with unique ID to avoid conflicts + method_id = f"{cls['full_name']}::{method['name']}" + + await session.run(""" + MERGE (m:Method {method_id: $method_id}) + ON CREATE SET m.name = $name, + m.full_name = $full_name, + m.args = $args, + m.params_list = $params_list, + m.params_detailed = $params_detailed, + m.return_type = $return_type, + m.created_at = datetime() + """, + name=method['name'], + full_name=method_full_name, + method_id=method_id, + args=method['args'], + params_list=[f"{p['name']}:{p['type']}" for p in method['params']], # Simple format + params_detailed=method.get('params_detailed', []), # Detailed format + return_type=method['return_type'] + ) + nodes_created += 1 + + # Connect Class to Method + await session.run(""" + MATCH (c:Class {full_name: $class_full_name}) + MATCH (m:Method {method_id: $method_id}) + MERGE (c)-[:HAS_METHOD]->(m) + """, + class_full_name=cls['full_name'], + method_id=method_id + ) + relationships_created += 1 + + # 5. Create Attribute nodes - use MERGE to avoid duplicates + for attr in cls['attributes']: + attr_full_name = f"{cls['full_name']}.{attr['name']}" + # Create attribute with unique ID to avoid conflicts + attr_id = f"{cls['full_name']}::{attr['name']}" + await session.run(""" + MERGE (a:Attribute {attr_id: $attr_id}) + ON CREATE SET a.name = $name, + a.full_name = $full_name, + a.type = $type, + a.created_at = datetime() + """, + name=attr['name'], + full_name=attr_full_name, + attr_id=attr_id, + type=attr['type'] + ) + nodes_created += 1 + + # Connect Class to Attribute + await session.run(""" + MATCH (c:Class {full_name: $class_full_name}) + MATCH (a:Attribute {attr_id: $attr_id}) + MERGE (c)-[:HAS_ATTRIBUTE]->(a) + """, + class_full_name=cls['full_name'], + attr_id=attr_id + ) + relationships_created += 1 + + # 6. 
Create Function nodes (top-level) - use MERGE to avoid duplicates + for func in mod['functions']: + func_id = f"{mod['file_path']}::{func['name']}" + await session.run(""" + MERGE (f:Function {func_id: $func_id}) + ON CREATE SET f.name = $name, + f.full_name = $full_name, + f.args = $args, + f.params_list = $params_list, + f.params_detailed = $params_detailed, + f.return_type = $return_type, + f.created_at = datetime() + """, + name=func['name'], + full_name=func['full_name'], + func_id=func_id, + args=func['args'], + params_list=func.get('params_list', []), # Simple format for backwards compatibility + params_detailed=func.get('params_detailed', []), # Detailed format + return_type=func['return_type'] + ) + nodes_created += 1 + + # Connect File to Function + await session.run(""" + MATCH (file:File {path: $file_path}) + MATCH (func:Function {func_id: $func_id}) + MERGE (file)-[:DEFINES]->(func) + """, file_path=mod['file_path'], func_id=func_id) + relationships_created += 1 + + # 7. Create Import relationships + for import_name in mod['imports']: + # Try to find the target file + await session.run(""" + MATCH (source:File {path: $source_path}) + OPTIONAL MATCH (target:File) + WHERE target.module_name = $import_name OR target.module_name STARTS WITH $import_name + WITH source, target + WHERE target IS NOT NULL + MERGE (source)-[:IMPORTS]->(target) + """, source_path=mod['file_path'], import_name=import_name) + relationships_created += 1 + + if (i + 1) % 10 == 0: + logger.info(f"Processed {i + 1}/{len(modules_data)} files...") + + logger.info(f"Created {nodes_created} nodes and {relationships_created} relationships") + + async def search_graph(self, query_type: str, **kwargs): + """Search the Neo4j graph directly""" + async with self.driver.session() as session: + if query_type == "files_importing": + target = kwargs.get('target') + result = await session.run(""" + MATCH (source:File)-[:IMPORTS]->(target:File) + WHERE target.module_name CONTAINS $target + RETURN 
source.path as file, target.module_name as imports + """, target=target) + return [{"file": record["file"], "imports": record["imports"]} async for record in result] + + elif query_type == "classes_in_file": + file_path = kwargs.get('file_path') + result = await session.run(""" + MATCH (f:File {path: $file_path})-[:DEFINES]->(c:Class) + RETURN c.name as class_name, c.full_name as full_name + """, file_path=file_path) + return [{"class_name": record["class_name"], "full_name": record["full_name"]} async for record in result] + + elif query_type == "methods_of_class": + class_name = kwargs.get('class_name') + result = await session.run(""" + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE c.name CONTAINS $class_name OR c.full_name CONTAINS $class_name + RETURN m.name as method_name, m.args as args + """, class_name=class_name) + return [{"method_name": record["method_name"], "args": record["args"]} async for record in result] + + +async def main(): + """Example usage""" + load_dotenv() + + neo4j_uri = os.environ.get('NEO4J_URI', 'bolt://localhost:7687') + neo4j_user = os.environ.get('NEO4J_USER', 'neo4j') + neo4j_password = os.environ.get('NEO4J_PASSWORD', 'password') + + extractor = DirectNeo4jExtractor(neo4j_uri, neo4j_user, neo4j_password) + + try: + await extractor.initialize() + + # Analyze repository - direct Neo4j, no LLM processing! + # repo_url = "https://github.com/pydantic/pydantic-ai.git" + repo_url = "https://github.com/getzep/graphiti.git" + await extractor.analyze_repository(repo_url) + + # Direct graph queries + print("\\n=== Direct Neo4j Queries ===") + + # Which files import from models? + results = await extractor.search_graph("files_importing", target="models") + print(f"\\nFiles importing from 'models': {len(results)}") + for result in results[:3]: + print(f"- {result['file']} imports {result['imports']}") + + # What classes are in a specific file? 
+ results = await extractor.search_graph("classes_in_file", file_path="pydantic_ai/models/openai.py") + print(f"\\nClasses in openai.py: {len(results)}") + for result in results: + print(f"- {result['class_name']}") + + # What methods does OpenAIModel have? + results = await extractor.search_graph("methods_of_class", class_name="OpenAIModel") + print(f"\\nMethods of OpenAIModel: {len(results)}") + for result in results[:5]: + print(f"- {result['method_name']}({', '.join(result['args'])})") + + finally: + await extractor.close() + + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/knowledge_graphs/query_knowledge_graph.py b/servers/mcp-crawl4ai-rag/knowledge_graphs/query_knowledge_graph.py new file mode 100644 index 0000000..981e42f --- /dev/null +++ b/servers/mcp-crawl4ai-rag/knowledge_graphs/query_knowledge_graph.py @@ -0,0 +1,400 @@ +#!/usr/bin/env python3 +""" +Knowledge Graph Query Tool + +Interactive script to explore what's actually stored in your Neo4j knowledge graph. +Useful for debugging hallucination detection and understanding graph contents. 
+""" + +import asyncio +import os +from dotenv import load_dotenv +from neo4j import AsyncGraphDatabase +from typing import List, Dict, Any +import argparse + + +class KnowledgeGraphQuerier: + """Interactive tool to query the knowledge graph""" + + def __init__(self, neo4j_uri: str, neo4j_user: str, neo4j_password: str): + self.neo4j_uri = neo4j_uri + self.neo4j_user = neo4j_user + self.neo4j_password = neo4j_password + self.driver = None + + async def initialize(self): + """Initialize Neo4j connection""" + self.driver = AsyncGraphDatabase.driver( + self.neo4j_uri, + auth=(self.neo4j_user, self.neo4j_password) + ) + print("šŸ”— Connected to Neo4j knowledge graph") + + async def close(self): + """Close Neo4j connection""" + if self.driver: + await self.driver.close() + + async def list_repositories(self): + """List all repositories in the knowledge graph""" + print("\nšŸ“š Repositories in Knowledge Graph:") + print("=" * 50) + + async with self.driver.session() as session: + query = "MATCH (r:Repository) RETURN r.name as name ORDER BY r.name" + result = await session.run(query) + + repos = [] + async for record in result: + repos.append(record['name']) + + if repos: + for i, repo in enumerate(repos, 1): + print(f"{i}. 
{repo}") + else: + print("No repositories found in knowledge graph.") + + return repos + + async def explore_repository(self, repo_name: str): + """Get overview of a specific repository""" + print(f"\nšŸ” Exploring Repository: {repo_name}") + print("=" * 60) + + async with self.driver.session() as session: + # Get file count + files_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File) + RETURN count(f) as file_count + """ + result = await session.run(files_query, repo_name=repo_name) + file_count = (await result.single())['file_count'] + + # Get class count + classes_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + RETURN count(DISTINCT c) as class_count + """ + result = await session.run(classes_query, repo_name=repo_name) + class_count = (await result.single())['class_count'] + + # Get function count + functions_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(func:Function) + RETURN count(DISTINCT func) as function_count + """ + result = await session.run(functions_query, repo_name=repo_name) + function_count = (await result.single())['function_count'] + + print(f"šŸ“„ Files: {file_count}") + print(f"šŸ—ļø Classes: {class_count}") + print(f"āš™ļø Functions: {function_count}") + + async def list_classes(self, repo_name: str = None, limit: int = 20): + """List classes in the knowledge graph""" + title = f"Classes in {repo_name}" if repo_name else "All Classes" + print(f"\nšŸ—ļø {title} (limit {limit}):") + print("=" * 50) + + async with self.driver.session() as session: + if repo_name: + query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + RETURN c.name as name, c.full_name as full_name + ORDER BY c.name + LIMIT $limit + """ + result = await session.run(query, repo_name=repo_name, limit=limit) + else: + query = """ + MATCH (c:Class) + RETURN c.name as name, c.full_name as full_name + ORDER BY c.name 
+ LIMIT $limit + """ + result = await session.run(query, limit=limit) + + classes = [] + async for record in result: + classes.append({ + 'name': record['name'], + 'full_name': record['full_name'] + }) + + if classes: + for i, cls in enumerate(classes, 1): + print(f"{i:2d}. {cls['name']} ({cls['full_name']})") + else: + print("No classes found.") + + return classes + + async def explore_class(self, class_name: str): + """Get detailed information about a specific class""" + print(f"\nšŸ” Exploring Class: {class_name}") + print("=" * 60) + + async with self.driver.session() as session: + # Find the class + class_query = """ + MATCH (c:Class) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN c.name as name, c.full_name as full_name + LIMIT 1 + """ + result = await session.run(class_query, class_name=class_name) + class_record = await result.single() + + if not class_record: + print(f"āŒ Class '{class_name}' not found in knowledge graph.") + return None + + actual_name = class_record['name'] + full_name = class_record['full_name'] + + print(f"šŸ“‹ Name: {actual_name}") + print(f"šŸ“‹ Full Name: {full_name}") + + # Get methods + methods_query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN m.name as name, m.params_list as params_list, m.params_detailed as params_detailed, m.return_type as return_type + ORDER BY m.name + """ + result = await session.run(methods_query, class_name=class_name) + + methods = [] + async for record in result: + methods.append({ + 'name': record['name'], + 'params_list': record['params_list'] or [], + 'params_detailed': record['params_detailed'] or [], + 'return_type': record['return_type'] or 'Any' + }) + + if methods: + print(f"\nāš™ļø Methods ({len(methods)}):") + for i, method in enumerate(methods, 1): + # Use detailed params if available, fall back to simple params + params_to_show = method['params_detailed'] or method['params_list'] + params = ', 
'.join(params_to_show) if params_to_show else '' + print(f"{i:2d}. {method['name']}({params}) -> {method['return_type']}") + else: + print("\nāš™ļø No methods found.") + + # Get attributes + attributes_query = """ + MATCH (c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN a.name as name, a.type as type + ORDER BY a.name + """ + result = await session.run(attributes_query, class_name=class_name) + + attributes = [] + async for record in result: + attributes.append({ + 'name': record['name'], + 'type': record['type'] or 'Any' + }) + + if attributes: + print(f"\nšŸ“‹ Attributes ({len(attributes)}):") + for i, attr in enumerate(attributes, 1): + print(f"{i:2d}. {attr['name']}: {attr['type']}") + else: + print("\nšŸ“‹ No attributes found.") + + return {'methods': methods, 'attributes': attributes} + + async def search_method(self, method_name: str, class_name: str = None): + """Search for methods by name""" + title = f"Method '{method_name}'" + if class_name: + title += f" in class '{class_name}'" + + print(f"\nšŸ” Searching for {title}:") + print("=" * 60) + + async with self.driver.session() as session: + if class_name: + query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE (c.name = $class_name OR c.full_name = $class_name) + AND m.name = $method_name + RETURN c.name as class_name, c.full_name as class_full_name, + m.name as method_name, m.params_list as params_list, + m.return_type as return_type, m.args as args + """ + result = await session.run(query, class_name=class_name, method_name=method_name) + else: + query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE m.name = $method_name + RETURN c.name as class_name, c.full_name as class_full_name, + m.name as method_name, m.params_list as params_list, + m.return_type as return_type, m.args as args + ORDER BY c.name + """ + result = await session.run(query, method_name=method_name) + + methods = [] + async for record in result: + 
methods.append({ + 'class_name': record['class_name'], + 'class_full_name': record['class_full_name'], + 'method_name': record['method_name'], + 'params_list': record['params_list'] or [], + 'return_type': record['return_type'] or 'Any', + 'args': record['args'] or [] + }) + + if methods: + for i, method in enumerate(methods, 1): + params = ', '.join(method['params_list']) if method['params_list'] else '' + print(f"{i}. {method['class_full_name']}.{method['method_name']}({params}) -> {method['return_type']}") + if method['args']: + print(f" Legacy args: {method['args']}") + else: + print(f"āŒ Method '{method_name}' not found.") + + return methods + + async def run_custom_query(self, query: str): + """Run a custom Cypher query""" + print(f"\nšŸ” Running Custom Query:") + print("=" * 60) + print(f"Query: {query}") + print("-" * 60) + + async with self.driver.session() as session: + try: + result = await session.run(query) + + records = [] + async for record in result: + records.append(dict(record)) + + if records: + for i, record in enumerate(records, 1): + print(f"{i:2d}. {record}") + if i >= 20: # Limit output + print(f"... 
and {len(records) - 20} more records") + break + else: + print("No results found.") + + return records + + except Exception as e: + print(f"āŒ Query error: {str(e)}") + return None + + +async def interactive_mode(querier: KnowledgeGraphQuerier): + """Interactive exploration mode""" + print("\nšŸš€ Welcome to Knowledge Graph Explorer!") + print("Available commands:") + print(" repos - List all repositories") + print(" explore - Explore a specific repository") + print(" classes [repo] - List classes (optionally in specific repo)") + print(" class - Explore a specific class") + print(" method [class] - Search for method") + print(" query - Run custom Cypher query") + print(" quit - Exit") + print() + + while True: + try: + command = input("šŸ” > ").strip() + + if not command: + continue + elif command == "quit": + break + elif command == "repos": + await querier.list_repositories() + elif command.startswith("explore "): + repo_name = command[8:].strip() + await querier.explore_repository(repo_name) + elif command == "classes": + await querier.list_classes() + elif command.startswith("classes "): + repo_name = command[8:].strip() + await querier.list_classes(repo_name) + elif command.startswith("class "): + class_name = command[6:].strip() + await querier.explore_class(class_name) + elif command.startswith("method "): + parts = command[7:].strip().split() + if len(parts) >= 2: + await querier.search_method(parts[0], parts[1]) + else: + await querier.search_method(parts[0]) + elif command.startswith("query "): + query = command[6:].strip() + await querier.run_custom_query(query) + else: + print("āŒ Unknown command. 
async def main():
    """CLI entry point: dispatch to the requested query, or run interactively.

    Reads Neo4j connection settings from the environment (with .env support)
    and routes exactly one of the CLI flags to the matching querier method.
    """
    # BUG FIX: `sys` was only imported inside the `__main__` guard below, so
    # invoking main() from anywhere else raised NameError at the
    # `len(sys.argv) == 1` check. Import it where it is used.
    import sys

    parser = argparse.ArgumentParser(description="Query the knowledge graph")
    parser.add_argument('--repos', action='store_true', help='List repositories')
    parser.add_argument('--classes', metavar='REPO', nargs='?', const='', help='List classes')
    parser.add_argument('--explore', metavar='REPO', help='Explore repository')
    parser.add_argument('--class', dest='class_name', metavar='NAME', help='Explore class')
    parser.add_argument('--method', nargs='+', metavar=('NAME', 'CLASS'), help='Search method')
    parser.add_argument('--query', metavar='CYPHER', help='Run custom query')
    parser.add_argument('--interactive', action='store_true', help='Interactive mode')

    args = parser.parse_args()

    # Load environment (NEO4J_* with sensible localhost defaults).
    load_dotenv()
    neo4j_uri = os.environ.get('NEO4J_URI', 'bolt://localhost:7687')
    neo4j_user = os.environ.get('NEO4J_USER', 'neo4j')
    neo4j_password = os.environ.get('NEO4J_PASSWORD', 'password')

    querier = KnowledgeGraphQuerier(neo4j_uri, neo4j_user, neo4j_password)

    try:
        await querier.initialize()

        # Dispatch exactly one command; with --interactive or no arguments at
        # all, drop into the interactive explorer.
        if args.repos:
            await querier.list_repositories()
        elif args.classes is not None:
            await querier.list_classes(args.classes if args.classes else None)
        elif args.explore:
            await querier.explore_repository(args.explore)
        elif args.class_name:
            await querier.explore_class(args.class_name)
        elif args.method:
            if len(args.method) >= 2:
                await querier.search_method(args.method[0], args.method[1])
            else:
                await querier.search_method(args.method[0])
        elif args.query:
            await querier.run_custom_query(args.query)
        elif args.interactive or len(sys.argv) == 1:
            await interactive_mode(querier)
        else:
            parser.print_help()

    finally:
        await querier.close()


if __name__ == "__main__":
    asyncio.run(main())
+ Be willing to admit when you didn't find the information necessary to answer the question.""", + deps_type=GraphitiDependencies +) + +# ========== Define a result model for Graphiti search ========== +class GraphitiSearchResult(BaseModel): + """Model representing a search result from Graphiti.""" + uuid: str = Field(description="The unique identifier for this fact") + fact: str = Field(description="The factual statement retrieved from the knowledge graph") + valid_at: Optional[str] = Field(None, description="When this fact became valid (if known)") + invalid_at: Optional[str] = Field(None, description="When this fact became invalid (if known)") + source_node_uuid: Optional[str] = Field(None, description="UUID of the source node") + +# ========== Graphiti search tool ========== +@graphiti_agent.tool +async def search_graphiti(ctx: RunContext[GraphitiDependencies], query: str) -> List[GraphitiSearchResult]: + """Search the Graphiti knowledge graph with the given query. + + Args: + ctx: The run context containing dependencies + query: The search query to find information in the knowledge graph + + Returns: + A list of search results containing facts that match the query + """ + # Access the Graphiti client from dependencies + graphiti = ctx.deps.graphiti_client + + try: + # Perform the search + results = await graphiti.search(query) + + # Format the results + formatted_results = [] + for result in results: + formatted_result = GraphitiSearchResult( + uuid=result.uuid, + fact=result.fact, + source_node_uuid=result.source_node_uuid if hasattr(result, 'source_node_uuid') else None + ) + + # Add temporal information if available + if hasattr(result, 'valid_at') and result.valid_at: + formatted_result.valid_at = str(result.valid_at) + if hasattr(result, 'invalid_at') and result.invalid_at: + formatted_result.invalid_at = str(result.invalid_at) + + formatted_results.append(formatted_result) + + return formatted_results + except Exception as e: + # Log the error but don't 
async def main():
    """Run the Graphiti agent REPL: read questions, stream answers, keep history."""
    print("Graphiti Agent - Powered by Pydantic AI, Graphiti, and Neo4j")
    print("Enter 'exit' to quit the program.")

    # Neo4j connection parameters
    neo4j_uri = os.environ.get('NEO4J_URI', 'bolt://localhost:7687')
    neo4j_user = os.environ.get('NEO4J_USER', 'neo4j')
    neo4j_password = os.environ.get('NEO4J_PASSWORD', 'password')

    # Initialize Graphiti with Neo4j connection
    graphiti_client = Graphiti(neo4j_uri, neo4j_user, neo4j_password)

    # Build graphiti's indices; tolerate failures when they already exist.
    try:
        await graphiti_client.build_indices_and_constraints()
        print("Graphiti indices built successfully.")
    except Exception as e:
        print(f"Note: {str(e)}")
        print("Continuing with existing indices...")

    console = Console()
    messages = []

    try:
        while True:
            user_input = input("\n[You] ")

            if user_input.lower() in ['exit', 'quit', 'bye', 'goodbye']:
                print("Goodbye!")
                break

            try:
                print("\n[Assistant]")
                with Live('', console=console, vertical_overflow='visible') as live:
                    # Pass the Graphiti client as a dependency
                    deps = GraphitiDependencies(graphiti_client=graphiti_client)

                    # BUG FIX: pydantic-ai's streaming API is Agent.run_stream;
                    # the previous call to run_a_stream does not exist and
                    # raised AttributeError on every query.
                    async with graphiti_agent.run_stream(
                        user_input, message_history=messages, deps=deps
                    ) as result:
                        curr_message = ""
                        async for message in result.stream_text(delta=True):
                            curr_message += message
                            live.update(Markdown(curr_message))

                # Add the new messages to the chat history
                messages.extend(result.all_messages())

            except Exception as e:
                print(f"\n[Error] An error occurred: {str(e)}")
    finally:
        # Close the Graphiti connection when done
        await graphiti_client.close()
        print("\nGraphiti connection closed.")


if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nProgram terminated by user.")
    except Exception as e:
        print(f"\nUnexpected error: {str(e)}")
        raise
+""" +from mcp.server.fastmcp import FastMCP, Context +from sentence_transformers import CrossEncoder +from contextlib import asynccontextmanager +from collections.abc import AsyncIterator +from dataclasses import dataclass +from typing import List, Dict, Any, Optional +from urllib.parse import urlparse, urldefrag +from xml.etree import ElementTree +from dotenv import load_dotenv +from supabase import Client +from pathlib import Path +import requests +import asyncio +import json +import os +import re +import concurrent.futures +import sys + +from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig, CacheMode, MemoryAdaptiveDispatcher + +# Add knowledge_graphs folder to path for importing knowledge graph modules +knowledge_graphs_path = Path(__file__).resolve().parent.parent / 'knowledge_graphs' +sys.path.append(str(knowledge_graphs_path)) + +from utils import ( + get_supabase_client, + add_documents_to_supabase, + search_documents, + extract_code_blocks, + generate_code_example_summary, + add_code_examples_to_supabase, + update_source_info, + extract_source_summary, + search_code_examples +) + +# Import knowledge graph modules +from knowledge_graph_validator import KnowledgeGraphValidator +from parse_repo_into_neo4j import DirectNeo4jExtractor +from ai_script_analyzer import AIScriptAnalyzer +from hallucination_reporter import HallucinationReporter + +# Load environment variables from the project root .env file +project_root = Path(__file__).resolve().parent.parent +dotenv_path = project_root / '.env' + +# Force override of existing environment variables +load_dotenv(dotenv_path, override=True) + +# Helper functions for Neo4j validation and error handling +def validate_neo4j_connection() -> bool: + """Check if Neo4j environment variables are configured.""" + return all([ + os.getenv("NEO4J_URI"), + os.getenv("NEO4J_USER"), + os.getenv("NEO4J_PASSWORD") + ]) + +def format_neo4j_error(error: Exception) -> str: + """Format Neo4j connection errors for 
def validate_script_path(script_path: str) -> Dict[str, Any]:
    """Check that *script_path* names a readable Python file.

    Returns {"valid": True} on success, or {"valid": False, "error": ...}.
    """
    if not script_path or not isinstance(script_path, str):
        return {"valid": False, "error": "Script path is required"}

    if not os.path.exists(script_path):
        return {"valid": False, "error": f"Script not found: {script_path}"}

    if not script_path.endswith('.py'):
        return {"valid": False, "error": "Only Python (.py) files are supported"}

    # Probe readability by reading a single character.
    try:
        with open(script_path, 'r', encoding='utf-8') as handle:
            handle.read(1)
    except Exception as exc:
        return {"valid": False, "error": f"Cannot read script file: {str(exc)}"}

    return {"valid": True}

def validate_github_url(repo_url: str) -> Dict[str, Any]:
    """Loosely validate a GitHub repository URL and derive the repository name.

    Returns {"valid": True, "repo_name": ...} on success, or
    {"valid": False, "error": ...} describing what is wrong.
    """
    if not repo_url or not isinstance(repo_url, str):
        return {"valid": False, "error": "Repository URL is required"}

    repo_url = repo_url.strip()

    # Basic GitHub URL validation: host or .git suffix.
    looks_like_github = "github.com" in repo_url.lower() or repo_url.endswith(".git")
    if not looks_like_github:
        return {"valid": False, "error": "Please provide a valid GitHub repository URL"}

    # Accept https clone URLs and git-over-ssh URLs only.
    if not repo_url.startswith(("https://", "git@")):
        return {"valid": False, "error": "Repository URL must start with https:// or git@"}

    return {"valid": True, "repo_name": repo_url.split('/')[-1].replace('.git', '')}
for our application context +@dataclass +class Crawl4AIContext: + """Context for the Crawl4AI MCP server.""" + crawler: AsyncWebCrawler + supabase_client: Client + reranking_model: Optional[CrossEncoder] = None + knowledge_validator: Optional[Any] = None # KnowledgeGraphValidator when available + repo_extractor: Optional[Any] = None # DirectNeo4jExtractor when available + +@asynccontextmanager +async def crawl4ai_lifespan(server: FastMCP) -> AsyncIterator[Crawl4AIContext]: + """ + Manages the Crawl4AI client lifecycle. + + Args: + server: The FastMCP server instance + + Yields: + Crawl4AIContext: The context containing the Crawl4AI crawler and Supabase client + """ + # Create browser configuration + browser_config = BrowserConfig( + headless=True, + verbose=False + ) + + # Initialize the crawler + crawler = AsyncWebCrawler(config=browser_config) + await crawler.__aenter__() + + # Initialize Supabase client + supabase_client = get_supabase_client() + + # Initialize cross-encoder model for reranking if enabled + reranking_model = None + if os.getenv("USE_RERANKING", "false") == "true": + try: + reranking_model = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2") + except Exception as e: + print(f"Failed to load reranking model: {e}") + reranking_model = None + + # Initialize Neo4j components if configured and enabled + knowledge_validator = None + repo_extractor = None + + # Check if knowledge graph functionality is enabled + knowledge_graph_enabled = os.getenv("USE_KNOWLEDGE_GRAPH", "false") == "true" + + if knowledge_graph_enabled: + neo4j_uri = os.getenv("NEO4J_URI") + neo4j_user = os.getenv("NEO4J_USER") + neo4j_password = os.getenv("NEO4J_PASSWORD") + + if neo4j_uri and neo4j_user and neo4j_password: + try: + print("Initializing knowledge graph components...") + + # Initialize knowledge graph validator + knowledge_validator = KnowledgeGraphValidator(neo4j_uri, neo4j_user, neo4j_password) + await knowledge_validator.initialize() + print("āœ“ Knowledge graph 
validator initialized") + + # Initialize repository extractor + repo_extractor = DirectNeo4jExtractor(neo4j_uri, neo4j_user, neo4j_password) + await repo_extractor.initialize() + print("āœ“ Repository extractor initialized") + + except Exception as e: + print(f"Failed to initialize Neo4j components: {format_neo4j_error(e)}") + knowledge_validator = None + repo_extractor = None + else: + print("Neo4j credentials not configured - knowledge graph tools will be unavailable") + else: + print("Knowledge graph functionality disabled - set USE_KNOWLEDGE_GRAPH=true to enable") + + try: + yield Crawl4AIContext( + crawler=crawler, + supabase_client=supabase_client, + reranking_model=reranking_model, + knowledge_validator=knowledge_validator, + repo_extractor=repo_extractor + ) + finally: + # Clean up all components + await crawler.__aexit__(None, None, None) + if knowledge_validator: + try: + await knowledge_validator.close() + print("āœ“ Knowledge graph validator closed") + except Exception as e: + print(f"Error closing knowledge validator: {e}") + if repo_extractor: + try: + await repo_extractor.close() + print("āœ“ Repository extractor closed") + except Exception as e: + print(f"Error closing repository extractor: {e}") + +# Initialize FastMCP server +mcp = FastMCP( + "mcp-crawl4ai-rag", + description="MCP server for RAG and web crawling with Crawl4AI", + lifespan=crawl4ai_lifespan, + host=os.getenv("HOST", "0.0.0.0"), + port=os.getenv("PORT", "8051") +) + +def rerank_results(model: CrossEncoder, query: str, results: List[Dict[str, Any]], content_key: str = "content") -> List[Dict[str, Any]]: + """ + Rerank search results using a cross-encoder model. 
def is_sitemap(url: str) -> bool:
    """Return True when *url* looks like an XML sitemap.

    Matches either an explicit sitemap.xml suffix or any URL whose path
    component contains the word 'sitemap'.
    """
    if url.endswith('sitemap.xml'):
        return True
    return 'sitemap' in urlparse(url).path

def is_txt(url: str) -> bool:
    """Return True when *url* points at a plain-text (.txt) file."""
    return url.endswith('.txt')
def smart_chunk_markdown(text: str, chunk_size: int = 5000) -> List[str]:
    """Split *text* into chunks of at most *chunk_size* characters.

    Prefers to cut at a code-fence (```), then at a paragraph break, then at
    a sentence end — but only when the cut point lies past 30% of the window,
    so chunks never become tiny. Each chunk is stripped of surrounding
    whitespace; empty chunks are dropped.
    """
    pieces: List[str] = []
    total = len(text)
    cursor = 0
    threshold = chunk_size * 0.3  # never cut within the first 30% of a window

    while cursor < total:
        limit = cursor + chunk_size

        # The remaining tail fits in a single chunk.
        if limit >= total:
            pieces.append(text[cursor:].strip())
            break

        window = text[cursor:limit]

        # Cut priority: code fence, then paragraph break, then sentence end.
        fence = window.rfind('```')
        if fence != -1 and fence > threshold:
            limit = cursor + fence
        elif '\n\n' in window:
            para = window.rfind('\n\n')
            if para > threshold:
                limit = cursor + para
        elif '. ' in window:
            period = window.rfind('. ')
            if period > threshold:
                limit = cursor + period + 1

        piece = text[cursor:limit].strip()
        if piece:
            pieces.append(piece)

        cursor = limit

    return pieces
+ + Args: + chunk: Markdown chunk + + Returns: + Dictionary with headers and stats + """ + headers = re.findall(r'^(#+)\s+(.+)$', chunk, re.MULTILINE) + header_str = '; '.join([f'{h[0]} {h[1]}' for h in headers]) if headers else '' + + return { + "headers": header_str, + "char_count": len(chunk), + "word_count": len(chunk.split()) + } + +def process_code_example(args): + """ + Process a single code example to generate its summary. + This function is designed to be used with concurrent.futures. + + Args: + args: Tuple containing (code, context_before, context_after) + + Returns: + The generated summary + """ + code, context_before, context_after = args + return generate_code_example_summary(code, context_before, context_after) + +@mcp.tool() +async def crawl_single_page(ctx: Context, url: str) -> str: + """ + Crawl a single web page and store its content in Supabase. + + This tool is ideal for quickly retrieving content from a specific URL without following links. + The content is stored in Supabase for later retrieval and querying. 
@mcp.tool()
async def crawl_single_page(ctx: Context, url: str) -> str:
    """
    Crawl a single web page and store its content in Supabase.

    This tool is ideal for quickly retrieving content from a specific URL without following links.
    The content is stored in Supabase for later retrieval and querying.

    Args:
        ctx: The MCP server provided context
        url: URL of the web page to crawl

    Returns:
        Summary of the crawling operation and storage in Supabase (JSON string)
    """
    try:
        # Get the crawler from the context
        crawler = ctx.request_context.lifespan_context.crawler
        supabase_client = ctx.request_context.lifespan_context.supabase_client

        # Configure the crawl
        run_config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS, stream=False)

        # Crawl the page
        result = await crawler.arun(url=url, config=run_config)

        if result.success and result.markdown:
            # Extract source_id (fall back to path for e.g. file:// URLs)
            parsed_url = urlparse(url)
            source_id = parsed_url.netloc or parsed_url.path

            # Chunk the content
            chunks = smart_chunk_markdown(result.markdown)

            # BUG FIX: record an actual UTC timestamp. The old code stored
            # str(asyncio.current_task().get_coro().__name__) — the coroutine's
            # name ("crawl_single_page"), not a time at all.
            from datetime import datetime, timezone
            crawl_time = datetime.now(timezone.utc).isoformat()

            # Prepare data for Supabase
            urls = []
            chunk_numbers = []
            contents = []
            metadatas = []
            total_word_count = 0

            for i, chunk in enumerate(chunks):
                urls.append(url)
                chunk_numbers.append(i)
                contents.append(chunk)

                # Extract metadata
                meta = extract_section_info(chunk)
                meta["chunk_index"] = i
                meta["url"] = url
                meta["source"] = source_id
                meta["crawl_time"] = crawl_time
                metadatas.append(meta)

                # Accumulate word count
                total_word_count += meta.get("word_count", 0)

            # Create url_to_full_document mapping
            url_to_full_document = {url: result.markdown}

            # Update source information FIRST (before inserting documents)
            source_summary = extract_source_summary(source_id, result.markdown[:5000])  # Use first 5000 chars for summary
            update_source_info(supabase_client, source_id, source_summary, total_word_count)

            # Add documentation chunks to Supabase (AFTER source exists)
            add_documents_to_supabase(supabase_client, urls, chunk_numbers, contents, metadatas, url_to_full_document)

            # BUG FIX: initialize before the feature-flag check so the summary
            # JSON below can't raise NameError when USE_AGENTIC_RAG is disabled.
            code_blocks = []

            # Extract and process code examples only if enabled
            extract_code_examples = os.getenv("USE_AGENTIC_RAG", "false") == "true"
            if extract_code_examples:
                code_blocks = extract_code_blocks(result.markdown)
                if code_blocks:
                    code_urls = []
                    code_chunk_numbers = []
                    code_examples = []
                    code_summaries = []
                    code_metadatas = []

                    # Process code examples in parallel (summary generation is I/O bound)
                    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
                        # Prepare arguments for parallel processing
                        summary_args = [(block['code'], block['context_before'], block['context_after'])
                                        for block in code_blocks]

                        # Generate summaries in parallel
                        summaries = list(executor.map(process_code_example, summary_args))

                    # Prepare code example data
                    for i, (block, summary) in enumerate(zip(code_blocks, summaries)):
                        code_urls.append(url)
                        code_chunk_numbers.append(i)
                        code_examples.append(block['code'])
                        code_summaries.append(summary)

                        # Create metadata for code example
                        code_meta = {
                            "chunk_index": i,
                            "url": url,
                            "source": source_id,
                            "char_count": len(block['code']),
                            "word_count": len(block['code'].split())
                        }
                        code_metadatas.append(code_meta)

                    # Add code examples to Supabase
                    add_code_examples_to_supabase(
                        supabase_client,
                        code_urls,
                        code_chunk_numbers,
                        code_examples,
                        code_summaries,
                        code_metadatas
                    )

            return json.dumps({
                "success": True,
                "url": url,
                "chunks_stored": len(chunks),
                "code_examples_stored": len(code_blocks) if code_blocks else 0,
                "content_length": len(result.markdown),
                "total_word_count": total_word_count,
                "source_id": source_id,
                "links_count": {
                    "internal": len(result.links.get("internal", [])),
                    "external": len(result.links.get("external", []))
                }
            }, indent=2)
        else:
            return json.dumps({
                "success": False,
                "url": url,
                "error": result.error_message
            }, indent=2)
    except Exception as e:
        return json.dumps({
            "success": False,
            "url": url,
            "error": str(e)
        }, indent=2)
@mcp.tool()
async def smart_crawl_url(ctx: Context, url: str, max_depth: int = 3, max_concurrent: int = 10, chunk_size: int = 5000) -> str:
    """
    Intelligently crawl a URL based on its type and store content in Supabase.

    This tool automatically detects the URL type and applies the appropriate crawling method:
    - For sitemaps: Extracts and crawls all URLs in parallel
    - For text files (llms.txt): Directly retrieves the content
    - For regular webpages: Recursively crawls internal links up to the specified depth

    All crawled content is chunked and stored in Supabase for later retrieval and querying.

    Args:
        ctx: The MCP server provided context
        url: URL to crawl (can be a regular webpage, sitemap.xml, or .txt file)
        max_depth: Maximum recursion depth for regular URLs (default: 3)
        max_concurrent: Maximum number of concurrent browser sessions (default: 10)
        chunk_size: Maximum size of each content chunk in characters (default: 5000)

    Returns:
        JSON string with crawl summary and storage information
    """
    # DOC FIX: the docstring previously claimed chunk_size defaults to 1000,
    # contradicting the signature's 5000.
    try:
        # Get the crawler from the context
        crawler = ctx.request_context.lifespan_context.crawler
        supabase_client = ctx.request_context.lifespan_context.supabase_client

        # Determine the crawl strategy
        crawl_results = []
        crawl_type = None

        if is_txt(url):
            # For text files, use simple crawl
            crawl_results = await crawl_markdown_file(crawler, url)
            crawl_type = "text_file"
        elif is_sitemap(url):
            # For sitemaps, extract URLs and crawl in parallel
            sitemap_urls = parse_sitemap(url)
            if not sitemap_urls:
                return json.dumps({
                    "success": False,
                    "url": url,
                    "error": "No URLs found in sitemap"
                }, indent=2)
            crawl_results = await crawl_batch(crawler, sitemap_urls, max_concurrent=max_concurrent)
            crawl_type = "sitemap"
        else:
            # For regular URLs, use recursive crawl
            crawl_results = await crawl_recursive_internal_links(crawler, [url], max_depth=max_depth, max_concurrent=max_concurrent)
            crawl_type = "webpage"

        if not crawl_results:
            return json.dumps({
                "success": False,
                "url": url,
                "error": "No content found"
            }, indent=2)

        # BUG FIX: record an actual UTC timestamp instead of the coroutine's
        # name (str(asyncio.current_task().get_coro().__name__)).
        from datetime import datetime, timezone
        crawl_time = datetime.now(timezone.utc).isoformat()

        # Process results and store in Supabase
        urls = []
        chunk_numbers = []
        contents = []
        metadatas = []
        chunk_count = 0

        # Track sources and their content
        source_content_map = {}
        source_word_counts = {}

        # Process documentation chunks
        for doc in crawl_results:
            source_url = doc['url']
            md = doc['markdown']
            chunks = smart_chunk_markdown(md, chunk_size=chunk_size)

            # Extract source_id
            parsed_url = urlparse(source_url)
            source_id = parsed_url.netloc or parsed_url.path

            # Store content for source summary generation
            if source_id not in source_content_map:
                source_content_map[source_id] = md[:5000]  # Store first 5000 chars
                source_word_counts[source_id] = 0

            for i, chunk in enumerate(chunks):
                urls.append(source_url)
                chunk_numbers.append(i)
                contents.append(chunk)

                # Extract metadata
                meta = extract_section_info(chunk)
                meta["chunk_index"] = i
                meta["url"] = source_url
                meta["source"] = source_id
                meta["crawl_type"] = crawl_type
                meta["crawl_time"] = crawl_time
                metadatas.append(meta)

                # Accumulate word count
                source_word_counts[source_id] += meta.get("word_count", 0)

                chunk_count += 1

        # Create url_to_full_document mapping
        url_to_full_document = {}
        for doc in crawl_results:
            url_to_full_document[doc['url']] = doc['markdown']

        # Update source information for each unique source FIRST (before inserting documents)
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            source_summary_args = [(source_id, content) for source_id, content in source_content_map.items()]
            source_summaries = list(executor.map(lambda args: extract_source_summary(args[0], args[1]), source_summary_args))

        for (source_id, _), summary in zip(source_summary_args, source_summaries):
            word_count = source_word_counts.get(source_id, 0)
            update_source_info(supabase_client, source_id, summary, word_count)

        # Add documentation chunks to Supabase (AFTER sources exist)
        batch_size = 20
        add_documents_to_supabase(supabase_client, urls, chunk_numbers, contents, metadatas, url_to_full_document, batch_size=batch_size)

        # BUG FIX: initialize before the feature-flag check so the final
        # summary's len(code_examples) can't raise NameError when
        # USE_AGENTIC_RAG is disabled. (Also dropped the unused
        # all_code_blocks local.)
        code_urls = []
        code_chunk_numbers = []
        code_examples = []
        code_summaries = []
        code_metadatas = []

        # Extract and process code examples from all documents only if enabled
        extract_code_examples_enabled = os.getenv("USE_AGENTIC_RAG", "false") == "true"
        if extract_code_examples_enabled:
            # Extract code blocks from all documents
            for doc in crawl_results:
                source_url = doc['url']
                md = doc['markdown']
                code_blocks = extract_code_blocks(md)

                if code_blocks:
                    # Process code examples in parallel
                    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
                        # Prepare arguments for parallel processing
                        summary_args = [(block['code'], block['context_before'], block['context_after'])
                                        for block in code_blocks]

                        # Generate summaries in parallel
                        summaries = list(executor.map(process_code_example, summary_args))

                    # Prepare code example data
                    parsed_url = urlparse(source_url)
                    source_id = parsed_url.netloc or parsed_url.path

                    for block, summary in zip(code_blocks, summaries):
                        code_urls.append(source_url)
                        code_chunk_numbers.append(len(code_examples))  # Use global code example index
                        code_examples.append(block['code'])
                        code_summaries.append(summary)

                        # Create metadata for code example
                        code_meta = {
                            "chunk_index": len(code_examples) - 1,
                            "url": source_url,
                            "source": source_id,
                            "char_count": len(block['code']),
                            "word_count": len(block['code'].split())
                        }
                        code_metadatas.append(code_meta)

            # Add all code examples to Supabase
            if code_examples:
                add_code_examples_to_supabase(
                    supabase_client,
                    code_urls,
                    code_chunk_numbers,
                    code_examples,
                    code_summaries,
                    code_metadatas,
                    batch_size=batch_size
                )

        return json.dumps({
            "success": True,
            "url": url,
            "crawl_type": crawl_type,
            "pages_crawled": len(crawl_results),
            "chunks_stored": chunk_count,
            "code_examples_stored": len(code_examples),
            "sources_updated": len(source_content_map),
            "urls_crawled": [doc['url'] for doc in crawl_results][:5] + (["..."] if len(crawl_results) > 5 else [])
        }, indent=2)
    except Exception as e:
        return json.dumps({
            "success": False,
            "url": url,
            "error": str(e)
        }, indent=2)
@mcp.tool()
async def get_available_sources(ctx: Context) -> str:
    """
    Get all available sources from the sources table.

    This tool returns a list of all unique sources (domains) that have been crawled and stored
    in the database, along with their summaries and statistics. This is useful for discovering
    what content is available for querying.

    Always use this tool before calling the RAG query or code example query tool
    with a specific source filter!

    Args:
        ctx: The MCP server provided context

    Returns:
        JSON string with the list of available sources and their details
    """
    try:
        # Supabase client lives on the lifespan context
        supabase_client = ctx.request_context.lifespan_context.supabase_client

        # Query the sources table directly, ordered by id for stable output
        result = supabase_client.from_('sources')\
            .select('*')\
            .order('source_id')\
            .execute()

        # Shape each row into the fields callers care about
        rows = result.data if result.data else []
        sources = [
            {
                "source_id": row.get("source_id"),
                "summary": row.get("summary"),
                "total_words": row.get("total_words"),
                "created_at": row.get("created_at"),
                "updated_at": row.get("updated_at"),
            }
            for row in rows
        ]

        return json.dumps({
            "success": True,
            "sources": sources,
            "count": len(sources)
        }, indent=2)
    except Exception as e:
        return json.dumps({
            "success": False,
            "error": str(e)
        }, indent=2)
@mcp.tool()
async def perform_rag_query(ctx: Context, query: str, source: str = None, match_count: int = 5) -> str:
    """
    Perform a RAG (Retrieval Augmented Generation) query on the stored content.

    This tool searches the vector database for content relevant to the query and returns
    the matching documents. Optionally filter by source domain.
    Get the source by using the get_available_sources tool before calling this search!

    Args:
        ctx: The MCP server provided context
        query: The search query
        source: Optional source domain to filter results (e.g., 'example.com')
        match_count: Maximum number of results to return (default: 5)

    Returns:
        JSON string with the search results
    """
    try:
        # Get the Supabase client from the context
        supabase_client = ctx.request_context.lifespan_context.supabase_client

        # Check if hybrid search is enabled
        use_hybrid_search = os.getenv("USE_HYBRID_SEARCH", "false") == "true"

        # Prepare filter if source is provided and not empty
        filter_metadata = None
        if source and source.strip():
            filter_metadata = {"source": source}

        if use_hybrid_search:
            # Hybrid search: combine vector and keyword search

            # 1. Get vector search results (get more to account for filtering)
            vector_results = search_documents(
                client=supabase_client,
                query=query,
                match_count=match_count * 2,  # Get double to have room for filtering
                filter_metadata=filter_metadata
            )

            # 2. Get keyword search results using ILIKE (substring match on content)
            keyword_query = supabase_client.from_('crawled_pages')\
                .select('id, url, chunk_number, content, metadata, source_id')\
                .ilike('content', f'%{query}%')

            # Apply source filter if provided
            if source and source.strip():
                keyword_query = keyword_query.eq('source_id', source)

            # Execute keyword search
            keyword_response = keyword_query.limit(match_count * 2).execute()
            keyword_results = keyword_response.data if keyword_response.data else []

            # 3. Combine results with preference for items appearing in both
            seen_ids = set()
            combined_results = []

            # First, add items that appear in both searches (these are the best matches)
            vector_ids = {r.get('id') for r in vector_results if r.get('id')}
            for kr in keyword_results:
                if kr['id'] in vector_ids and kr['id'] not in seen_ids:
                    # Find the vector result to get similarity score
                    for vr in vector_results:
                        if vr.get('id') == kr['id']:
                            # Boost similarity score for items in both results.
                            # NOTE(review): this mutates the vector result dict
                            # in place; capped at 1.0.
                            vr['similarity'] = min(1.0, vr.get('similarity', 0) * 1.2)
                            combined_results.append(vr)
                            seen_ids.add(kr['id'])
                            break

            # Then add remaining vector results (semantic matches without exact keyword)
            for vr in vector_results:
                if vr.get('id') and vr['id'] not in seen_ids and len(combined_results) < match_count:
                    combined_results.append(vr)
                    seen_ids.add(vr['id'])

            # Finally, add pure keyword matches if we still need more results
            for kr in keyword_results:
                if kr['id'] not in seen_ids and len(combined_results) < match_count:
                    # Convert keyword result to match vector result format
                    combined_results.append({
                        'id': kr['id'],
                        'url': kr['url'],
                        'chunk_number': kr['chunk_number'],
                        'content': kr['content'],
                        'metadata': kr['metadata'],
                        'source_id': kr['source_id'],
                        'similarity': 0.5  # Default similarity for keyword-only matches
                    })
                    seen_ids.add(kr['id'])

            # Use combined results, truncated to the requested count
            results = combined_results[:match_count]

        else:
            # Standard vector search only
            results = search_documents(
                client=supabase_client,
                query=query,
                match_count=match_count,
                filter_metadata=filter_metadata
            )

        # Apply reranking if enabled (requires a loaded reranking model)
        use_reranking = os.getenv("USE_RERANKING", "false") == "true"
        if use_reranking and ctx.request_context.lifespan_context.reranking_model:
            results = rerank_results(ctx.request_context.lifespan_context.reranking_model, query, results, content_key="content")

        # Format the results for the JSON response
        formatted_results = []
        for result in results:
            formatted_result = {
                "url": result.get("url"),
                "content": result.get("content"),
                "metadata": result.get("metadata"),
                "similarity": result.get("similarity")
            }
            # Include rerank score if available
            if "rerank_score" in result:
                formatted_result["rerank_score"] = result["rerank_score"]
            formatted_results.append(formatted_result)

        return json.dumps({
            "success": True,
            "query": query,
            "source_filter": source,
            "search_mode": "hybrid" if use_hybrid_search else "vector",
            "reranking_applied": use_reranking and ctx.request_context.lifespan_context.reranking_model is not None,
            "results": formatted_results,
            "count": len(formatted_results)
        }, indent=2)
    except Exception as e:
        return json.dumps({
            "success": False,
            "query": query,
            "error": str(e)
        }, indent=2)
@mcp.tool()
async def search_code_examples(ctx: Context, query: str, source_id: str = None, match_count: int = 5) -> str:
    """
    Search for code examples relevant to the query.

    This tool searches the vector database for code examples relevant to the query and returns
    the matching examples with their summaries. Optionally filter by source_id.
    Get the source_id by using the get_available_sources tool before calling this search!

    Use the get_available_sources tool first to see what sources are available for filtering.

    Args:
        ctx: The MCP server provided context
        query: The search query
        source_id: Optional source ID to filter results (e.g., 'example.com')
        match_count: Maximum number of results to return (default: 5)

    Returns:
        JSON string with the search results
    """
    # Check if code example extraction is enabled (early exit before any I/O)
    extract_code_examples_enabled = os.getenv("USE_AGENTIC_RAG", "false") == "true"
    if not extract_code_examples_enabled:
        return json.dumps({
            "success": False,
            "error": "Code example extraction is disabled. Perform a normal RAG search."
        }, indent=2)

    try:
        # Get the Supabase client from the context
        supabase_client = ctx.request_context.lifespan_context.supabase_client

        # Check if hybrid search is enabled
        use_hybrid_search = os.getenv("USE_HYBRID_SEARCH", "false") == "true"

        # Prepare filter if source is provided and not empty
        filter_metadata = None
        if source_id and source_id.strip():
            filter_metadata = {"source": source_id}

        if use_hybrid_search:
            # Hybrid search: combine vector and keyword search

            # Import the search function from utils (local import avoids
            # shadowing this tool's own name, which matches the helper's)
            from utils import search_code_examples as search_code_examples_impl

            # 1. Get vector search results (get more to account for filtering)
            vector_results = search_code_examples_impl(
                client=supabase_client,
                query=query,
                match_count=match_count * 2,  # Get double to have room for filtering
                filter_metadata=filter_metadata
            )

            # 2. Get keyword search results using ILIKE on both content and summary
            keyword_query = supabase_client.from_('code_examples')\
                .select('id, url, chunk_number, content, summary, metadata, source_id')\
                .or_(f'content.ilike.%{query}%,summary.ilike.%{query}%')

            # Apply source filter if provided
            if source_id and source_id.strip():
                keyword_query = keyword_query.eq('source_id', source_id)

            # Execute keyword search
            keyword_response = keyword_query.limit(match_count * 2).execute()
            keyword_results = keyword_response.data if keyword_response.data else []

            # 3. Combine results with preference for items appearing in both
            seen_ids = set()
            combined_results = []

            # First, add items that appear in both searches (these are the best matches)
            vector_ids = {r.get('id') for r in vector_results if r.get('id')}
            for kr in keyword_results:
                if kr['id'] in vector_ids and kr['id'] not in seen_ids:
                    # Find the vector result to get similarity score
                    for vr in vector_results:
                        if vr.get('id') == kr['id']:
                            # Boost similarity score for items in both results
                            # (in-place mutation, capped at 1.0)
                            vr['similarity'] = min(1.0, vr.get('similarity', 0) * 1.2)
                            combined_results.append(vr)
                            seen_ids.add(kr['id'])
                            break

            # Then add remaining vector results (semantic matches without exact keyword)
            for vr in vector_results:
                if vr.get('id') and vr['id'] not in seen_ids and len(combined_results) < match_count:
                    combined_results.append(vr)
                    seen_ids.add(vr['id'])

            # Finally, add pure keyword matches if we still need more results
            for kr in keyword_results:
                if kr['id'] not in seen_ids and len(combined_results) < match_count:
                    # Convert keyword result to match vector result format
                    combined_results.append({
                        'id': kr['id'],
                        'url': kr['url'],
                        'chunk_number': kr['chunk_number'],
                        'content': kr['content'],
                        'summary': kr['summary'],
                        'metadata': kr['metadata'],
                        'source_id': kr['source_id'],
                        'similarity': 0.5  # Default similarity for keyword-only matches
                    })
                    seen_ids.add(kr['id'])

            # Use combined results, truncated to the requested count
            results = combined_results[:match_count]

        else:
            # Standard vector search only
            from utils import search_code_examples as search_code_examples_impl

            results = search_code_examples_impl(
                client=supabase_client,
                query=query,
                match_count=match_count,
                filter_metadata=filter_metadata
            )

        # Apply reranking if enabled (requires a loaded reranking model)
        use_reranking = os.getenv("USE_RERANKING", "false") == "true"
        if use_reranking and ctx.request_context.lifespan_context.reranking_model:
            results = rerank_results(ctx.request_context.lifespan_context.reranking_model, query, results, content_key="content")

        # Format the results for the JSON response ("content" is exposed as "code")
        formatted_results = []
        for result in results:
            formatted_result = {
                "url": result.get("url"),
                "code": result.get("content"),
                "summary": result.get("summary"),
                "metadata": result.get("metadata"),
                "source_id": result.get("source_id"),
                "similarity": result.get("similarity")
            }
            # Include rerank score if available
            if "rerank_score" in result:
                formatted_result["rerank_score"] = result["rerank_score"]
            formatted_results.append(formatted_result)

        return json.dumps({
            "success": True,
            "query": query,
            "source_filter": source_id,
            "search_mode": "hybrid" if use_hybrid_search else "vector",
            "reranking_applied": use_reranking and ctx.request_context.lifespan_context.reranking_model is not None,
            "results": formatted_results,
            "count": len(formatted_results)
        }, indent=2)
    except Exception as e:
        return json.dumps({
            "success": False,
            "query": query,
            "error": str(e)
        }, indent=2)
@mcp.tool()
async def check_ai_script_hallucinations(ctx: Context, script_path: str) -> str:
    """
    Check an AI-generated Python script for hallucinations using the knowledge graph.

    This tool analyzes a Python script for potential AI hallucinations by validating
    imports, method calls, class instantiations, and function calls against a Neo4j
    knowledge graph containing real repository data.

    The tool performs comprehensive analysis including:
    - Import validation against known repositories
    - Method call validation on classes from the knowledge graph
    - Class instantiation parameter validation
    - Function call parameter validation
    - Attribute access validation

    Args:
        ctx: The MCP server provided context
        script_path: Absolute path to the Python script to analyze

    Returns:
        JSON string with hallucination detection results, confidence scores, and recommendations
    """
    try:
        # Check if knowledge graph functionality is enabled
        knowledge_graph_enabled = os.getenv("USE_KNOWLEDGE_GRAPH", "false") == "true"
        if not knowledge_graph_enabled:
            return json.dumps({
                "success": False,
                "error": "Knowledge graph functionality is disabled. Set USE_KNOWLEDGE_GRAPH=true in environment."
            }, indent=2)

        # Get the knowledge validator from context
        knowledge_validator = ctx.request_context.lifespan_context.knowledge_validator

        if not knowledge_validator:
            return json.dumps({
                "success": False,
                "error": "Knowledge graph validator not available. Check Neo4j configuration in environment variables."
            }, indent=2)

        # Validate script path before attempting analysis
        validation = validate_script_path(script_path)
        if not validation["valid"]:
            return json.dumps({
                "success": False,
                "script_path": script_path,
                "error": validation["error"]
            }, indent=2)

        # Step 1: Analyze script structure using AST
        analyzer = AIScriptAnalyzer()
        analysis_result = analyzer.analyze_script(script_path)

        # Analysis errors are non-fatal: log to stdout and continue
        if analysis_result.errors:
            print(f"Analysis warnings for {script_path}: {analysis_result.errors}")

        # Step 2: Validate against knowledge graph
        validation_result = await knowledge_validator.validate_script(analysis_result)

        # Step 3: Generate comprehensive report
        reporter = HallucinationReporter()
        report = reporter.generate_comprehensive_report(validation_result)

        # Format response with comprehensive information
        return json.dumps({
            "success": True,
            "script_path": script_path,
            "overall_confidence": validation_result.overall_confidence,
            "validation_summary": {
                "total_validations": report["validation_summary"]["total_validations"],
                "valid_count": report["validation_summary"]["valid_count"],
                "invalid_count": report["validation_summary"]["invalid_count"],
                "uncertain_count": report["validation_summary"]["uncertain_count"],
                "not_found_count": report["validation_summary"]["not_found_count"],
                "hallucination_rate": report["validation_summary"]["hallucination_rate"]
            },
            "hallucinations_detected": report["hallucinations_detected"],
            "recommendations": report["recommendations"],
            "analysis_metadata": {
                "total_imports": report["analysis_metadata"]["total_imports"],
                "total_classes": report["analysis_metadata"]["total_classes"],
                "total_methods": report["analysis_metadata"]["total_methods"],
                "total_attributes": report["analysis_metadata"]["total_attributes"],
                "total_functions": report["analysis_metadata"]["total_functions"]
            },
            "libraries_analyzed": report.get("libraries_analyzed", [])
        }, indent=2)

    except Exception as e:
        return json.dumps({
            "success": False,
            "script_path": script_path,
            "error": f"Analysis failed: {str(e)}"
        }, indent=2)
@mcp.tool()
async def query_knowledge_graph(ctx: Context, command: str) -> str:
    """
    Query and explore the Neo4j knowledge graph containing repository data.

    This tool provides comprehensive access to the knowledge graph for exploring repositories,
    classes, methods, functions, and their relationships. Perfect for understanding what data
    is available for hallucination detection and debugging validation results.

    **āš ļø IMPORTANT: Always start with the `repos` command first!**
    Before using any other commands, run `repos` to see what repositories are available
    in your knowledge graph. This will help you understand what data you can explore.

    ## Available Commands:

    **Repository Commands:**
    - `repos` - **START HERE!** List all repositories in the knowledge graph
    - `explore <repo_name>` - Get detailed overview of a specific repository

    **Class Commands:**
    - `classes` - List all classes across all repositories (limited to 20)
    - `classes <repo_name>` - List classes in a specific repository
    - `class <class_name>` - Get detailed information about a specific class including methods and attributes

    **Method Commands:**
    - `method <method_name>` - Search for methods by name across all classes
    - `method <method_name> <class_name>` - Search for a method within a specific class

    **Custom Query:**
    - `query <cypher_query>` - Execute a custom Cypher query (results limited to 20 records)

    ## Knowledge Graph Schema:

    **Node Types:**
    - Repository: `(r:Repository {name: string})`
    - File: `(f:File {path: string, module_name: string})`
    - Class: `(c:Class {name: string, full_name: string})`
    - Method: `(m:Method {name: string, params_list: [string], params_detailed: [string], return_type: string, args: [string]})`
    - Function: `(func:Function {name: string, params_list: [string], params_detailed: [string], return_type: string, args: [string]})`
    - Attribute: `(a:Attribute {name: string, type: string})`

    **Relationships:**
    - `(r:Repository)-[:CONTAINS]->(f:File)`
    - `(f:File)-[:DEFINES]->(c:Class)`
    - `(c:Class)-[:HAS_METHOD]->(m:Method)`
    - `(c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute)`
    - `(f:File)-[:DEFINES]->(func:Function)`

    ## Example Workflow:
    ```
    1. repos                                    # See what repositories are available
    2. explore pydantic-ai                      # Explore a specific repository
    3. classes pydantic-ai                      # List classes in that repository
    4. class Agent                              # Explore the Agent class
    5. method run_stream                        # Search for run_stream method
    6. method __init__ Agent                    # Find Agent constructor
    7. query "MATCH (c:Class)-[:HAS_METHOD]->(m:Method) WHERE m.name = 'run' RETURN c.name, m.name LIMIT 5"
    ```

    Args:
        ctx: The MCP server provided context
        command: Command string to execute (see available commands above)

    Returns:
        JSON string with query results, statistics, and metadata
    """
    try:
        # Check if knowledge graph functionality is enabled
        knowledge_graph_enabled = os.getenv("USE_KNOWLEDGE_GRAPH", "false") == "true"
        if not knowledge_graph_enabled:
            return json.dumps({
                "success": False,
                "error": "Knowledge graph functionality is disabled. Set USE_KNOWLEDGE_GRAPH=true in environment."
            }, indent=2)

        # Get Neo4j driver from context (held by the repository extractor)
        repo_extractor = ctx.request_context.lifespan_context.repo_extractor
        if not repo_extractor or not repo_extractor.driver:
            return json.dumps({
                "success": False,
                "error": "Neo4j connection not available. Check Neo4j configuration in environment variables."
            }, indent=2)

        # Parse command: first whitespace token is the verb, the rest are args
        command = command.strip()
        if not command:
            return json.dumps({
                "success": False,
                "command": "",
                "error": "Command cannot be empty. Available commands: repos, explore <repo>, classes [repo], class <name>, method <name> [class], query <cypher>"
            }, indent=2)

        parts = command.split()
        cmd = parts[0].lower()
        args = parts[1:] if len(parts) > 1 else []

        async with repo_extractor.driver.session() as session:
            # Route to appropriate handler
            if cmd == "repos":
                return await _handle_repos_command(session, command)
            elif cmd == "explore":
                if not args:
                    return json.dumps({
                        "success": False,
                        "command": command,
                        "error": "Repository name required. Usage: explore <repo_name>"
                    }, indent=2)
                return await _handle_explore_command(session, command, args[0])
            elif cmd == "classes":
                # Repository filter is optional for 'classes'
                repo_name = args[0] if args else None
                return await _handle_classes_command(session, command, repo_name)
            elif cmd == "class":
                if not args:
                    return json.dumps({
                        "success": False,
                        "command": command,
                        "error": "Class name required. Usage: class <class_name>"
                    }, indent=2)
                return await _handle_class_command(session, command, args[0])
            elif cmd == "method":
                if not args:
                    return json.dumps({
                        "success": False,
                        "command": command,
                        "error": "Method name required. Usage: method <method_name> [class_name]"
                    }, indent=2)
                method_name = args[0]
                class_name = args[1] if len(args) > 1 else None
                return await _handle_method_command(session, command, method_name, class_name)
            elif cmd == "query":
                if not args:
                    return json.dumps({
                        "success": False,
                        "command": command,
                        "error": "Cypher query required. Usage: query <cypher_query>"
                    }, indent=2)
                # Re-join the remaining tokens into the raw Cypher text
                cypher_query = " ".join(args)
                return await _handle_query_command(session, command, cypher_query)
            else:
                return json.dumps({
                    "success": False,
                    "command": command,
                    "error": f"Unknown command '{cmd}'. Available commands: repos, explore <repo>, classes [repo], class <name>, method <name> [class], query <cypher>"
                }, indent=2)

    except Exception as e:
        return json.dumps({
            "success": False,
            "command": command,
            "error": f"Query execution failed: {str(e)}"
        }, indent=2)
Available commands: repos, explore , classes [repo], class , method [class], query " + }, indent=2) + + except Exception as e: + return json.dumps({ + "success": False, + "command": command, + "error": f"Query execution failed: {str(e)}" + }, indent=2) + + +async def _handle_repos_command(session, command: str) -> str: + """Handle 'repos' command - list all repositories""" + query = "MATCH (r:Repository) RETURN r.name as name ORDER BY r.name" + result = await session.run(query) + + repos = [] + async for record in result: + repos.append(record['name']) + + return json.dumps({ + "success": True, + "command": command, + "data": { + "repositories": repos + }, + "metadata": { + "total_results": len(repos), + "limited": False + } + }, indent=2) + + +async def _handle_explore_command(session, command: str, repo_name: str) -> str: + """Handle 'explore ' command - get repository overview""" + # Check if repository exists + repo_check_query = "MATCH (r:Repository {name: $repo_name}) RETURN r.name as name" + result = await session.run(repo_check_query, repo_name=repo_name) + repo_record = await result.single() + + if not repo_record: + return json.dumps({ + "success": False, + "command": command, + "error": f"Repository '{repo_name}' not found in knowledge graph" + }, indent=2) + + # Get file count + files_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File) + RETURN count(f) as file_count + """ + result = await session.run(files_query, repo_name=repo_name) + file_count = (await result.single())['file_count'] + + # Get class count + classes_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + RETURN count(DISTINCT c) as class_count + """ + result = await session.run(classes_query, repo_name=repo_name) + class_count = (await result.single())['class_count'] + + # Get function count + functions_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(func:Function) + RETURN 
count(DISTINCT func) as function_count + """ + result = await session.run(functions_query, repo_name=repo_name) + function_count = (await result.single())['function_count'] + + # Get method count + methods_query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class)-[:HAS_METHOD]->(m:Method) + RETURN count(DISTINCT m) as method_count + """ + result = await session.run(methods_query, repo_name=repo_name) + method_count = (await result.single())['method_count'] + + return json.dumps({ + "success": True, + "command": command, + "data": { + "repository": repo_name, + "statistics": { + "files": file_count, + "classes": class_count, + "functions": function_count, + "methods": method_count + } + }, + "metadata": { + "total_results": 1, + "limited": False + } + }, indent=2) + + +async def _handle_classes_command(session, command: str, repo_name: str = None) -> str: + """Handle 'classes [repo]' command - list classes""" + limit = 20 + + if repo_name: + query = """ + MATCH (r:Repository {name: $repo_name})-[:CONTAINS]->(f:File)-[:DEFINES]->(c:Class) + RETURN c.name as name, c.full_name as full_name + ORDER BY c.name + LIMIT $limit + """ + result = await session.run(query, repo_name=repo_name, limit=limit) + else: + query = """ + MATCH (c:Class) + RETURN c.name as name, c.full_name as full_name + ORDER BY c.name + LIMIT $limit + """ + result = await session.run(query, limit=limit) + + classes = [] + async for record in result: + classes.append({ + 'name': record['name'], + 'full_name': record['full_name'] + }) + + return json.dumps({ + "success": True, + "command": command, + "data": { + "classes": classes, + "repository_filter": repo_name + }, + "metadata": { + "total_results": len(classes), + "limited": len(classes) >= limit + } + }, indent=2) + + +async def _handle_class_command(session, command: str, class_name: str) -> str: + """Handle 'class ' command - explore specific class""" + # Find the class + class_query = """ + MATCH 
(c:Class) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN c.name as name, c.full_name as full_name + LIMIT 1 + """ + result = await session.run(class_query, class_name=class_name) + class_record = await result.single() + + if not class_record: + return json.dumps({ + "success": False, + "command": command, + "error": f"Class '{class_name}' not found in knowledge graph" + }, indent=2) + + actual_name = class_record['name'] + full_name = class_record['full_name'] + + # Get methods + methods_query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN m.name as name, m.params_list as params_list, m.params_detailed as params_detailed, m.return_type as return_type + ORDER BY m.name + """ + result = await session.run(methods_query, class_name=class_name) + + methods = [] + async for record in result: + # Use detailed params if available, fall back to simple params + params_to_use = record['params_detailed'] or record['params_list'] or [] + methods.append({ + 'name': record['name'], + 'parameters': params_to_use, + 'return_type': record['return_type'] or 'Any' + }) + + # Get attributes + attributes_query = """ + MATCH (c:Class)-[:HAS_ATTRIBUTE]->(a:Attribute) + WHERE c.name = $class_name OR c.full_name = $class_name + RETURN a.name as name, a.type as type + ORDER BY a.name + """ + result = await session.run(attributes_query, class_name=class_name) + + attributes = [] + async for record in result: + attributes.append({ + 'name': record['name'], + 'type': record['type'] or 'Any' + }) + + return json.dumps({ + "success": True, + "command": command, + "data": { + "class": { + "name": actual_name, + "full_name": full_name, + "methods": methods, + "attributes": attributes + } + }, + "metadata": { + "total_results": 1, + "methods_count": len(methods), + "attributes_count": len(attributes), + "limited": False + } + }, indent=2) + + +async def _handle_method_command(session, command: str, method_name: 
str, class_name: str = None) -> str: + """Handle 'method [class]' command - search for methods""" + if class_name: + query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE (c.name = $class_name OR c.full_name = $class_name) + AND m.name = $method_name + RETURN c.name as class_name, c.full_name as class_full_name, + m.name as method_name, m.params_list as params_list, + m.params_detailed as params_detailed, m.return_type as return_type, m.args as args + """ + result = await session.run(query, class_name=class_name, method_name=method_name) + else: + query = """ + MATCH (c:Class)-[:HAS_METHOD]->(m:Method) + WHERE m.name = $method_name + RETURN c.name as class_name, c.full_name as class_full_name, + m.name as method_name, m.params_list as params_list, + m.params_detailed as params_detailed, m.return_type as return_type, m.args as args + ORDER BY c.name + LIMIT 20 + """ + result = await session.run(query, method_name=method_name) + + methods = [] + async for record in result: + # Use detailed params if available, fall back to simple params + params_to_use = record['params_detailed'] or record['params_list'] or [] + methods.append({ + 'class_name': record['class_name'], + 'class_full_name': record['class_full_name'], + 'method_name': record['method_name'], + 'parameters': params_to_use, + 'return_type': record['return_type'] or 'Any', + 'legacy_args': record['args'] or [] + }) + + if not methods: + return json.dumps({ + "success": False, + "command": command, + "error": f"Method '{method_name}'" + (f" in class '{class_name}'" if class_name else "") + " not found" + }, indent=2) + + return json.dumps({ + "success": True, + "command": command, + "data": { + "methods": methods, + "class_filter": class_name + }, + "metadata": { + "total_results": len(methods), + "limited": len(methods) >= 20 and not class_name + } + }, indent=2) + + +async def _handle_query_command(session, command: str, cypher_query: str) -> str: + """Handle 'query ' command - execute custom 
Cypher query""" + try: + # Execute the query with a limit to prevent overwhelming responses + result = await session.run(cypher_query) + + records = [] + count = 0 + async for record in result: + records.append(dict(record)) + count += 1 + if count >= 20: # Limit results to prevent overwhelming responses + break + + return json.dumps({ + "success": True, + "command": command, + "data": { + "query": cypher_query, + "results": records + }, + "metadata": { + "total_results": len(records), + "limited": len(records) >= 20 + } + }, indent=2) + + except Exception as e: + return json.dumps({ + "success": False, + "command": command, + "error": f"Cypher query error: {str(e)}", + "data": { + "query": cypher_query + } + }, indent=2) + + +@mcp.tool() +async def parse_github_repository(ctx: Context, repo_url: str) -> str: + """ + Parse a GitHub repository into the Neo4j knowledge graph. + + This tool clones a GitHub repository, analyzes its Python files, and stores + the code structure (classes, methods, functions, imports) in Neo4j for use + in hallucination detection. The tool: + + - Clones the repository to a temporary location + - Analyzes Python files to extract code structure + - Stores classes, methods, functions, and imports in Neo4j + - Provides detailed statistics about the parsing results + - Automatically handles module name detection for imports + + Args: + ctx: The MCP server provided context + repo_url: GitHub repository URL (e.g., 'https://github.com/user/repo.git') + + Returns: + JSON string with parsing results, statistics, and repository information + """ + try: + # Check if knowledge graph functionality is enabled + knowledge_graph_enabled = os.getenv("USE_KNOWLEDGE_GRAPH", "false") == "true" + if not knowledge_graph_enabled: + return json.dumps({ + "success": False, + "error": "Knowledge graph functionality is disabled. Set USE_KNOWLEDGE_GRAPH=true in environment." 
@mcp.tool()
async def parse_github_repository(ctx: Context, repo_url: str) -> str:
    """
    Parse a GitHub repository into the Neo4j knowledge graph.

    This tool clones a GitHub repository, analyzes its Python files, and stores
    the code structure (classes, methods, functions, imports) in Neo4j for use
    in hallucination detection. The tool:

    - Clones the repository to a temporary location
    - Analyzes Python files to extract code structure
    - Stores classes, methods, functions, and imports in Neo4j
    - Provides detailed statistics about the parsing results
    - Automatically handles module name detection for imports

    Args:
        ctx: The MCP server provided context
        repo_url: GitHub repository URL (e.g., 'https://github.com/user/repo.git')

    Returns:
        JSON string with parsing results, statistics, and repository information
    """
    try:
        # Check if knowledge graph functionality is enabled
        # (feature-gated so deployments without Neo4j can disable it cleanly)
        knowledge_graph_enabled = os.getenv("USE_KNOWLEDGE_GRAPH", "false") == "true"
        if not knowledge_graph_enabled:
            return json.dumps({
                "success": False,
                "error": "Knowledge graph functionality is disabled. Set USE_KNOWLEDGE_GRAPH=true in environment."
            }, indent=2)

        # Get the repository extractor from context
        # NOTE(review): repo_extractor appears to be populated by the server
        # lifespan when Neo4j is configured — confirm against the lifespan setup.
        repo_extractor = ctx.request_context.lifespan_context.repo_extractor

        if not repo_extractor:
            return json.dumps({
                "success": False,
                "error": "Repository extractor not available. Check Neo4j configuration in environment variables."
            }, indent=2)

        # Validate repository URL
        validation = validate_github_url(repo_url)
        if not validation["valid"]:
            return json.dumps({
                "success": False,
                "repo_url": repo_url,
                "error": validation["error"]
            }, indent=2)

        repo_name = validation["repo_name"]

        # Parse the repository (this includes cloning, analysis, and Neo4j storage)
        print(f"Starting repository analysis for: {repo_name}")
        await repo_extractor.analyze_repository(repo_url)
        print(f"Repository analysis completed for: {repo_name}")

        # Query Neo4j for statistics about the parsed repository
        async with repo_extractor.driver.session() as session:
            # Get comprehensive repository statistics
            stats_query = """
            MATCH (r:Repository {name: $repo_name})
            OPTIONAL MATCH (r)-[:CONTAINS]->(f:File)
            OPTIONAL MATCH (f)-[:DEFINES]->(c:Class)
            OPTIONAL MATCH (c)-[:HAS_METHOD]->(m:Method)
            OPTIONAL MATCH (f)-[:DEFINES]->(func:Function)
            OPTIONAL MATCH (c)-[:HAS_ATTRIBUTE]->(a:Attribute)
            WITH r,
                 count(DISTINCT f) as files_count,
                 count(DISTINCT c) as classes_count,
                 count(DISTINCT m) as methods_count,
                 count(DISTINCT func) as functions_count,
                 count(DISTINCT a) as attributes_count

            // Get some sample module names
            OPTIONAL MATCH (r)-[:CONTAINS]->(sample_f:File)
            WITH r, files_count, classes_count, methods_count, functions_count, attributes_count,
                 collect(DISTINCT sample_f.module_name)[0..5] as sample_modules

            RETURN
                r.name as repo_name,
                files_count,
                classes_count,
                methods_count,
                functions_count,
                attributes_count,
                sample_modules
            """

            result = await session.run(stats_query, repo_name=repo_name)
            record = await result.single()

            if record:
                stats = {
                    "repository": record['repo_name'],
                    "files_processed": record['files_count'],
                    "classes_created": record['classes_count'],
                    "methods_created": record['methods_count'],
                    "functions_created": record['functions_count'],
                    "attributes_created": record['attributes_count'],
                    "sample_modules": record['sample_modules'] or []
                }
            else:
                # No Repository node means analyze_repository silently stored nothing.
                return json.dumps({
                    "success": False,
                    "repo_url": repo_url,
                    "error": f"Repository '{repo_name}' not found in database after parsing"
                }, indent=2)

        return json.dumps({
            "success": True,
            "repo_url": repo_url,
            "repo_name": repo_name,
            "message": f"Successfully parsed repository '{repo_name}' into knowledge graph",
            "statistics": stats,
            "ready_for_validation": True,
            "next_steps": [
                "Repository is now available for hallucination detection",
                f"Use check_ai_script_hallucinations to validate scripts against {repo_name}",
                "The knowledge graph contains classes, methods, and functions from this repository"
            ]
        }, indent=2)

    except Exception as e:
        return json.dumps({
            "success": False,
            "repo_url": repo_url,
            "error": f"Repository parsing failed: {str(e)}"
        }, indent=2)
"repository": record['repo_name'], + "files_processed": record['files_count'], + "classes_created": record['classes_count'], + "methods_created": record['methods_count'], + "functions_created": record['functions_count'], + "attributes_created": record['attributes_count'], + "sample_modules": record['sample_modules'] or [] + } + else: + return json.dumps({ + "success": False, + "repo_url": repo_url, + "error": f"Repository '{repo_name}' not found in database after parsing" + }, indent=2) + + return json.dumps({ + "success": True, + "repo_url": repo_url, + "repo_name": repo_name, + "message": f"Successfully parsed repository '{repo_name}' into knowledge graph", + "statistics": stats, + "ready_for_validation": True, + "next_steps": [ + "Repository is now available for hallucination detection", + f"Use check_ai_script_hallucinations to validate scripts against {repo_name}", + "The knowledge graph contains classes, methods, and functions from this repository" + ] + }, indent=2) + + except Exception as e: + return json.dumps({ + "success": False, + "repo_url": repo_url, + "error": f"Repository parsing failed: {str(e)}" + }, indent=2) + +async def crawl_markdown_file(crawler: AsyncWebCrawler, url: str) -> List[Dict[str, Any]]: + """ + Crawl a .txt or markdown file. + + Args: + crawler: AsyncWebCrawler instance + url: URL of the file + + Returns: + List of dictionaries with URL and markdown content + """ + crawl_config = CrawlerRunConfig() + + result = await crawler.arun(url=url, config=crawl_config) + if result.success and result.markdown: + return [{'url': url, 'markdown': result.markdown}] + else: + print(f"Failed to crawl {url}: {result.error_message}") + return [] + +async def crawl_batch(crawler: AsyncWebCrawler, urls: List[str], max_concurrent: int = 10) -> List[Dict[str, Any]]: + """ + Batch crawl multiple URLs in parallel. 
async def crawl_batch(crawler: AsyncWebCrawler, urls: List[str], max_concurrent: int = 10) -> List[Dict[str, Any]]:
    """Crawl many URLs concurrently, keeping only successful markdown results.

    Args:
        crawler: AsyncWebCrawler instance
        urls: List of URLs to crawl
        max_concurrent: Maximum number of concurrent browser sessions

    Returns:
        List of {'url', 'markdown'} dicts for pages that produced markdown.
    """
    config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS, stream=False)
    # Throttle parallel sessions when host memory pressure rises.
    dispatcher = MemoryAdaptiveDispatcher(
        memory_threshold_percent=70.0,
        check_interval=1.0,
        max_session_permit=max_concurrent
    )

    outcomes = await crawler.arun_many(urls=urls, config=config, dispatcher=dispatcher)
    return [
        {'url': outcome.url, 'markdown': outcome.markdown}
        for outcome in outcomes
        if outcome.success and outcome.markdown
    ]

async def crawl_recursive_internal_links(crawler: AsyncWebCrawler, start_urls: List[str], max_depth: int = 3, max_concurrent: int = 10) -> List[Dict[str, Any]]:
    """Breadth-first crawl of internal links, bounded by max_depth levels.

    Args:
        crawler: AsyncWebCrawler instance
        start_urls: List of starting URLs
        max_depth: Maximum recursion depth
        max_concurrent: Maximum number of concurrent browser sessions

    Returns:
        List of {'url', 'markdown'} dicts for every successfully crawled page.
    """
    config = CrawlerRunConfig(cache_mode=CacheMode.BYPASS, stream=False)
    dispatcher = MemoryAdaptiveDispatcher(
        memory_threshold_percent=70.0,
        check_interval=1.0,
        max_session_permit=max_concurrent
    )

    def canonical(url):
        # Drop any #fragment so the same page is not crawled twice.
        return urldefrag(url)[0]

    seen = set()
    frontier = {canonical(u) for u in start_urls}
    collected = []
    depth = 0

    while depth < max_depth:
        pending = [canonical(u) for u in frontier if canonical(u) not in seen]
        if not pending:
            break

        outcomes = await crawler.arun_many(urls=pending, config=config, dispatcher=dispatcher)
        frontier = set()

        for outcome in outcomes:
            page = canonical(outcome.url)
            seen.add(page)

            if outcome.success and outcome.markdown:
                collected.append({'url': outcome.url, 'markdown': outcome.markdown})
                for link in outcome.links.get("internal", []):
                    target = canonical(link["href"])
                    if target not in seen:
                        frontier.add(target)

        depth += 1

    return collected

async def main():
    """Start the MCP server on the transport selected by $TRANSPORT (default sse)."""
    if os.getenv("TRANSPORT", "sse") == 'sse':
        await mcp.run_sse_async()
    else:
        await mcp.run_stdio_async()

if __name__ == "__main__":
    asyncio.run(main())
def get_supabase_client() -> Client:
    """Build a Supabase client from SUPABASE_URL / SUPABASE_SERVICE_KEY.

    Returns:
        Supabase client instance

    Raises:
        ValueError: if either environment variable is missing.
    """
    url = os.getenv("SUPABASE_URL")
    key = os.getenv("SUPABASE_SERVICE_KEY")

    if not (url and key):
        raise ValueError("SUPABASE_URL and SUPABASE_SERVICE_KEY must be set in environment variables")

    return create_client(url, key)

def create_embeddings_batch(texts: List[str]) -> List[List[float]]:
    """Embed many texts in one API call, with retries and a per-text fallback.

    Args:
        texts: List of texts to create embeddings for

    Returns:
        One embedding (list of floats) per input text; failed texts get a
        zero vector of EMBEDDING_DIMENSION.
    """
    if not texts:
        return []

    max_retries = 3
    delay = 1.0  # Start with 1 second delay

    for attempt in range(max_retries):
        try:
            response = openai_client.embeddings.create(
                model=EMBEDDING_MODEL,
                input=texts
            )
            return [item.embedding for item in response.data]
        except Exception as e:
            if attempt < max_retries - 1:
                print(f"Error creating batch embeddings (attempt {attempt + 1}/{max_retries}): {e}")
                print(f"Retrying in {delay} seconds...")
                time.sleep(delay)
                delay *= 2  # Exponential backoff
                continue

            # All batch attempts exhausted: degrade to one request per text so
            # a single bad input cannot sink the whole batch.
            print(f"Failed to create batch embeddings after {max_retries} attempts: {e}")
            print("Attempting to create embeddings individually...")
            vectors = []
            ok = 0
            for i, text in enumerate(texts):
                try:
                    single = openai_client.embeddings.create(
                        model=EMBEDDING_MODEL,
                        input=[text]
                    )
                    vectors.append(single.data[0].embedding)
                    ok += 1
                except Exception as individual_error:
                    print(f"Failed to create embedding for text {i}: {individual_error}")
                    # Zero vector keeps list positions aligned with the inputs.
                    vectors.append([0.0] * EMBEDDING_DIMENSION)

            print(f"Successfully created {ok}/{len(texts)} embeddings individually")
            return vectors
def create_embedding(text: str) -> List[float]:
    """Embed one piece of text via the batch helper.

    Args:
        text: Text to create an embedding for

    Returns:
        The embedding vector, or a zero vector of EMBEDDING_DIMENSION on error.
    """
    try:
        batch = create_embeddings_batch([text])
        return batch[0] if batch else [0.0] * EMBEDDING_DIMENSION
    except Exception as e:
        print(f"Error creating embedding: {e}")
        # Zero vector keeps callers working even when the API is unreachable.
        return [0.0] * EMBEDDING_DIMENSION

def generate_contextual_embedding(full_document: str, chunk: str) -> Tuple[str, bool]:
    """Prefix a chunk with LLM-generated document context to aid retrieval.

    Args:
        full_document: The complete document text
        chunk: The specific chunk of text to generate context for

    Returns:
        (contextualized text, True) on success; (original chunk, False) when
        the completion call fails for any reason.
    """
    model_choice = os.getenv("MODEL_CHOICE")

    try:
        # Cap the document excerpt so the prompt stays within token limits.
        prompt = f"""
{full_document[:25000]}

Here is the chunk we want to situate within the whole document

{chunk}

Please give a short succinct context to situate this chunk within the overall document for the purposes of improving search retrieval of the chunk. Answer only with the succinct context and nothing else."""

        response = openai.chat.completions.create(
            model=model_choice,
            messages=[
                {"role": "system", "content": "You are a helpful assistant that provides concise contextual information."},
                {"role": "user", "content": prompt}
            ],
            temperature=0.3,
            max_tokens=200
        )

        context = response.choices[0].message.content.strip()
        return f"{context}\n---\n{chunk}", True

    except Exception as e:
        print(f"Error generating contextual embedding: {e}. Using original chunk instead.")
        return chunk, False
def process_chunk_with_context(args):
    """Adapter for concurrent.futures: unpack one work item and contextualize it.

    Args:
        args: Tuple of (url, content, full_document); the url is carried along
              by callers but not needed here.

    Returns:
        Tuple of (contextual text, whether contextual embedding succeeded).
    """
    _url, chunk_text, document_text = args
    return generate_contextual_embedding(document_text, chunk_text)
def add_documents_to_supabase(
    client: Client,
    urls: List[str],
    chunk_numbers: List[int],
    contents: List[str],
    metadatas: List[Dict[str, Any]],
    url_to_full_document: Dict[str, str],
    batch_size: int = 20
) -> None:
    """
    Add documents to the Supabase crawled_pages table in batches.

    Existing rows for the same URLs are deleted first so re-crawls do not
    create duplicates. When USE_CONTEXTUAL_EMBEDDINGS=true, each chunk is
    prefixed with LLM-generated document context before embedding.

    Args:
        client: Supabase client
        urls: List of URLs (parallel to chunk_numbers/contents/metadatas)
        chunk_numbers: List of chunk numbers
        contents: List of document contents
        metadatas: List of document metadata
        url_to_full_document: Dictionary mapping URLs to their full document content
        batch_size: Size of each batch for insertion
    """
    # Get unique URLs to delete existing records
    unique_urls = list(set(urls))

    # Delete existing records for these URLs in a single operation
    try:
        if unique_urls:
            client.table("crawled_pages").delete().in_("url", unique_urls).execute()
    except Exception as e:
        print(f"Batch delete failed: {e}. Trying one-by-one deletion as fallback.")
        # Fallback: delete records one by one; keep going past failures so one
        # bad URL does not block the rest of the batch.
        for url in unique_urls:
            try:
                client.table("crawled_pages").delete().eq("url", url).execute()
            except Exception as inner_e:
                print(f"Error deleting record for URL {url}: {inner_e}")

    # Check if MODEL_CHOICE is set for contextual embeddings
    use_contextual_embeddings = os.getenv("USE_CONTEXTUAL_EMBEDDINGS", "false") == "true"
    print(f"\n\nUse contextual embeddings: {use_contextual_embeddings}\n\n")

    # Process in batches to avoid memory issues
    for i in range(0, len(contents), batch_size):
        batch_end = min(i + batch_size, len(contents))

        # Get batch slices
        batch_urls = urls[i:batch_end]
        batch_chunk_numbers = chunk_numbers[i:batch_end]
        batch_contents = contents[i:batch_end]
        batch_metadatas = metadatas[i:batch_end]

        if use_contextual_embeddings:
            # Prepare arguments for parallel processing
            process_args = []
            for j, content in enumerate(batch_contents):
                url = batch_urls[j]
                full_document = url_to_full_document.get(url, "")
                process_args.append((url, content, full_document))

            # BUG FIX: as_completed() yields futures in *completion* order, not
            # submission order. The previous code appended results to a list,
            # which could pair chunk k's contextual text (and its embedding)
            # with chunk m's URL/metadata. Assign by submission index instead,
            # pre-filling each slot with the original content as the fallback.
            contextual_contents = list(batch_contents)
            with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
                future_to_idx = {executor.submit(process_chunk_with_context, arg): idx
                                 for idx, arg in enumerate(process_args)}

                for future in concurrent.futures.as_completed(future_to_idx):
                    idx = future_to_idx[future]
                    try:
                        result, success = future.result()
                        contextual_contents[idx] = result
                        if success:
                            batch_metadatas[idx]["contextual_embedding"] = True
                    except Exception as e:
                        print(f"Error processing chunk {idx}: {e}")
                        # contextual_contents[idx] already holds the original chunk.
        else:
            # If not using contextual embeddings, use original contents
            contextual_contents = batch_contents

        # Create embeddings for the entire batch at once
        batch_embeddings = create_embeddings_batch(contextual_contents)

        batch_data = []
        for j in range(len(contextual_contents)):
            chunk_size = len(contextual_contents[j])

            # Extract source_id from the URL's host (path as a fallback for
            # schemeless inputs).
            parsed_url = urlparse(batch_urls[j])
            source_id = parsed_url.netloc or parsed_url.path

            batch_data.append({
                "url": batch_urls[j],
                "chunk_number": batch_chunk_numbers[j],
                "content": contextual_contents[j],  # Contextualized text when enabled
                "metadata": {
                    "chunk_size": chunk_size,
                    **batch_metadatas[j]
                },
                "source_id": source_id,
                "embedding": batch_embeddings[j]
            })

        # Insert with retry + exponential backoff; final fallback inserts rows
        # one at a time so a single bad row cannot discard the whole batch.
        max_retries = 3
        retry_delay = 1.0  # Start with 1 second delay

        for retry in range(max_retries):
            try:
                client.table("crawled_pages").insert(batch_data).execute()
                break
            except Exception as e:
                if retry < max_retries - 1:
                    print(f"Error inserting batch into Supabase (attempt {retry + 1}/{max_retries}): {e}")
                    print(f"Retrying in {retry_delay} seconds...")
                    time.sleep(retry_delay)
                    retry_delay *= 2  # Exponential backoff
                else:
                    print(f"Failed to insert batch after {max_retries} attempts: {e}")
                    print("Attempting to insert records individually...")
                    successful_inserts = 0
                    for record in batch_data:
                        try:
                            client.table("crawled_pages").insert(record).execute()
                            successful_inserts += 1
                        except Exception as individual_error:
                            print(f"Failed to insert individual record for URL {record['url']}: {individual_error}")

                    if successful_inserts > 0:
                        print(f"Successfully inserted {successful_inserts}/{len(batch_data)} records individually")
client.table("crawled_pages").insert(record).execute() + successful_inserts += 1 + except Exception as individual_error: + print(f"Failed to insert individual record for URL {record['url']}: {individual_error}") + + if successful_inserts > 0: + print(f"Successfully inserted {successful_inserts}/{len(batch_data)} records individually") + +def search_documents( + client: Client, + query: str, + match_count: int = 10, + filter_metadata: Optional[Dict[str, Any]] = None +) -> List[Dict[str, Any]]: + """ + Search for documents in Supabase using vector similarity. + + Args: + client: Supabase client + query: Query text + match_count: Maximum number of results to return + filter_metadata: Optional metadata filter + + Returns: + List of matching documents + """ + # Create embedding for the query + query_embedding = create_embedding(query) + + # Execute the search using the match_crawled_pages function + try: + # Only include filter parameter if filter_metadata is provided and not empty + params = { + 'query_embedding': query_embedding, + 'match_count': match_count + } + + # Only add the filter if it's actually provided and not empty + if filter_metadata: + params['filter'] = filter_metadata # Pass the dictionary directly, not JSON-encoded + + result = client.rpc('match_crawled_pages', params).execute() + + return result.data + except Exception as e: + print(f"Error searching documents: {e}") + return [] + + +def extract_code_blocks(markdown_content: str, min_length: int = 1000) -> List[Dict[str, Any]]: + """ + Extract code blocks from markdown content along with context. 
def extract_code_blocks(markdown_content: str, min_length: int = 1000) -> List[Dict[str, Any]]:
    """Pull fenced code blocks (and surrounding context) out of markdown.

    Args:
        markdown_content: The markdown content to extract code blocks from
        min_length: Minimum length of code blocks to extract (default: 1000 characters)

    Returns:
        One dict per extracted block with keys 'code', 'language',
        'context_before', 'context_after', and 'full_context'.
    """
    extracted = []

    # Edge case: a document wrapped entirely in backticks — skip the opening fence.
    # NOTE(review): the offset is applied to the unstripped string, so leading
    # whitespace before an initial ``` would make this skip miss — confirm intent.
    trimmed = markdown_content.strip()
    start_offset = 0
    if trimmed.startswith('```'):
        start_offset = 3
        print("Skipping initial triple backticks")

    # Collect every fence position in document order.
    marker_positions = []
    scan_from = start_offset
    while True:
        hit = markdown_content.find('```', scan_from)
        if hit == -1:
            break
        marker_positions.append(hit)
        scan_from = hit + 3

    # Walk fences two at a time: (opening, closing) pairs.
    pair_index = 0
    while pair_index + 1 < len(marker_positions):
        open_pos = marker_positions[pair_index]
        close_pos = marker_positions[pair_index + 1]

        section = markdown_content[open_pos + 3:close_pos]

        # A short, space-free first line is treated as a language specifier.
        head, sep, tail = section.partition('\n')
        if sep:
            specifier = head.strip()
            if specifier and ' ' not in specifier and len(specifier) < 20:
                language = specifier
                code_text = tail.strip()
            else:
                language = ""
                code_text = section.strip()
        else:
            language = ""
            code_text = section.strip()

        # Keep only blocks that clear the minimum size threshold.
        if len(code_text) >= min_length:
            before_start = max(0, open_pos - 1000)
            context_before = markdown_content[before_start:open_pos].strip()

            after_end = min(len(markdown_content), close_pos + 3 + 1000)
            context_after = markdown_content[close_pos + 3:after_end].strip()

            extracted.append({
                'code': code_text,
                'language': language,
                'context_before': context_before,
                'context_after': context_after,
                'full_context': f"{context_before}\n\n{code_text}\n\n{context_after}"
            })

        # Advance past the closing fence we just consumed.
        pair_index += 2

    return extracted
def generate_code_example_summary(code: str, context_before: str, context_after: str) -> str:
    """Summarize what a code example demonstrates, using surrounding context.

    Args:
        code: The code example
        context_before: Context before the code
        context_after: Context after the code

    Returns:
        A 2-3 sentence summary, or a generic fallback string on any failure.
    """
    model_choice = os.getenv("MODEL_CHOICE")

    # Trim each section so the prompt stays within a modest token budget.
    lead_in = context_before[-500:] if len(context_before) > 500 else context_before
    snippet = code[:1500] if len(code) > 1500 else code
    lead_out = context_after[:500] if len(context_after) > 500 else context_after

    prompt = f"""
{lead_in}



{snippet}



{lead_out}


Based on the code example and its surrounding context, provide a concise summary (2-3 sentences) that describes what this code example demonstrates and its purpose. Focus on the practical application and key concepts illustrated.
"""

    try:
        response = openai.chat.completions.create(
            model=model_choice,
            messages=[
                {"role": "system", "content": "You are a helpful assistant that provides concise code example summaries."},
                {"role": "user", "content": prompt}
            ],
            temperature=0.3,
            max_tokens=100
        )
        return response.choices[0].message.content.strip()

    except Exception as e:
        print(f"Error generating code example summary: {e}")
        return "Code example for demonstration purposes."
def add_code_examples_to_supabase(
    client: Client,
    urls: List[str],
    chunk_numbers: List[int],
    code_examples: List[str],
    summaries: List[str],
    metadatas: List[Dict[str, Any]],
    batch_size: int = 20
):
    """
    Add code examples to the Supabase code_examples table in batches.

    Existing rows for the same URLs are deleted first so re-runs do not
    accumulate duplicates. Each example is embedded together with its summary.

    Args:
        client: Supabase client
        urls: List of URLs
        chunk_numbers: List of chunk numbers
        code_examples: List of code example contents
        summaries: List of code example summaries
        metadatas: List of metadata dictionaries
        batch_size: Size of each batch for insertion
    """
    if not urls:
        return

    # Delete existing records for these URLs
    unique_urls = list(set(urls))
    for url in unique_urls:
        try:
            client.table('code_examples').delete().eq('url', url).execute()
        except Exception as e:
            print(f"Error deleting existing code examples for {url}: {e}")

    # Process in batches
    total_items = len(urls)
    for i in range(0, total_items, batch_size):
        batch_end = min(i + batch_size, total_items)
        batch_texts = []

        # Create combined texts for embedding (code + summary)
        for j in range(i, batch_end):
            combined_text = f"{code_examples[j]}\n\nSummary: {summaries[j]}"
            batch_texts.append(combined_text)

        # Create embeddings for the batch
        embeddings = create_embeddings_batch(batch_texts)

        # Check if embeddings are valid (not all zeros)
        valid_embeddings = []
        for embedding in embeddings:
            if embedding and not all(v == 0.0 for v in embedding):
                valid_embeddings.append(embedding)
            else:
                print(f"Warning: Zero or invalid embedding detected, creating new one...")
                # Try to create a single embedding as fallback.
                # NOTE(review): len(valid_embeddings) doubles as the index of the
                # text being retried — this holds only because results are
                # appended in the same order as batch_texts; confirm before
                # restructuring this loop.
                single_embedding = create_embedding(batch_texts[len(valid_embeddings)])
                valid_embeddings.append(single_embedding)

        # Prepare batch data
        batch_data = []
        for j, embedding in enumerate(valid_embeddings):
            # Map the batch-local index back to the position in the full lists.
            idx = i + j

            # Extract source_id from URL
            parsed_url = urlparse(urls[idx])
            source_id = parsed_url.netloc or parsed_url.path

            batch_data.append({
                'url': urls[idx],
                'chunk_number': chunk_numbers[idx],
                'content': code_examples[idx],
                'summary': summaries[idx],
                'metadata': metadatas[idx],  # Store as JSON object, not string
                'source_id': source_id,
                'embedding': embedding
            })

        # Insert batch into Supabase with retry logic
        max_retries = 3
        retry_delay = 1.0  # Start with 1 second delay

        for retry in range(max_retries):
            try:
                client.table('code_examples').insert(batch_data).execute()
                # Success - break out of retry loop
                break
            except Exception as e:
                if retry < max_retries - 1:
                    print(f"Error inserting batch into Supabase (attempt {retry + 1}/{max_retries}): {e}")
                    print(f"Retrying in {retry_delay} seconds...")
                    time.sleep(retry_delay)
                    retry_delay *= 2  # Exponential backoff
                else:
                    # Final attempt failed
                    print(f"Failed to insert batch after {max_retries} attempts: {e}")
                    # Optionally, try inserting records one by one as a last resort
                    print("Attempting to insert records individually...")
                    successful_inserts = 0
                    for record in batch_data:
                        try:
                            client.table('code_examples').insert(record).execute()
                            successful_inserts += 1
                        except Exception as individual_error:
                            print(f"Failed to insert individual record for URL {record['url']}: {individual_error}")

                    if successful_inserts > 0:
                        print(f"Successfully inserted {successful_inserts}/{len(batch_data)} records individually")

        print(f"Inserted batch {i//batch_size + 1} of {(total_items + batch_size - 1)//batch_size} code examples")
+ + Args: + client: Supabase client + source_id: The source ID (domain) + summary: Summary of the source + word_count: Total word count for the source + """ + try: + # Try to update existing source + result = client.table('sources').update({ + 'summary': summary, + 'total_word_count': word_count, + 'updated_at': 'now()' + }).eq('source_id', source_id).execute() + + # If no rows were updated, insert new source + if not result.data: + client.table('sources').insert({ + 'source_id': source_id, + 'summary': summary, + 'total_word_count': word_count + }).execute() + print(f"Created new source: {source_id}") + else: + print(f"Updated source: {source_id}") + + except Exception as e: + print(f"Error updating source {source_id}: {e}") + + +def extract_source_summary(source_id: str, content: str, max_length: int = 500) -> str: + """ + Extract a summary for a source from its content using an LLM. + + This function uses the OpenAI API to generate a concise summary of the source content. + + Args: + source_id: The source ID (domain) + content: The content to extract a summary from + max_length: Maximum length of the summary + + Returns: + A summary string + """ + # Default summary if we can't extract anything meaningful + default_summary = f"Content from {source_id}" + + if not content or len(content.strip()) == 0: + return default_summary + + # Get the model choice from environment variables + model_choice = os.getenv("MODEL_CHOICE") + + # Limit content length to avoid token limits + truncated_content = content[:25000] if len(content) > 25000 else content + + # Create the prompt for generating the summary + prompt = f""" +{truncated_content} + + +The above content is from the documentation for '{source_id}'. Please provide a concise summary (3-5 sentences) that describes what this library/tool/framework is about. The summary should help understand what the library/tool/framework accomplishes and the purpose. 
+""" + + try: + # Call the OpenAI API to generate the summary + response = openai.chat.completions.create( + model=model_choice, + messages=[ + {"role": "system", "content": "You are a helpful assistant that provides concise library/tool/framework summaries."}, + {"role": "user", "content": prompt} + ], + temperature=0.3, + max_tokens=150 + ) + + # Extract the generated summary + summary = response.choices[0].message.content.strip() + + # Ensure the summary is not too long + if len(summary) > max_length: + summary = summary[:max_length] + "..." + + return summary + + except Exception as e: + print(f"Error generating summary with LLM for {source_id}: {e}. Using default summary.") + return default_summary + + +def search_code_examples( + client: Client, + query: str, + match_count: int = 10, + filter_metadata: Optional[Dict[str, Any]] = None, + source_id: Optional[str] = None +) -> List[Dict[str, Any]]: + """ + Search for code examples in Supabase using vector similarity. + + Args: + client: Supabase client + query: Query text + match_count: Maximum number of results to return + filter_metadata: Optional metadata filter + source_id: Optional source ID to filter results + + Returns: + List of matching code examples + """ + # Create a more descriptive query for better embedding match + # Since code examples are embedded with their summaries, we should make the query more descriptive + enhanced_query = f"Code example for {query}\n\nSummary: Example code showing {query}" + + # Create embedding for the enhanced query + query_embedding = create_embedding(enhanced_query) + + # Execute the search using the match_code_examples function + try: + # Only include filter parameter if filter_metadata is provided and not empty + params = { + 'query_embedding': query_embedding, + 'match_count': match_count + } + + # Only add the filter if it's actually provided and not empty + if filter_metadata: + params['filter'] = filter_metadata + + # Add source filter if provided + if source_id: 
+ params['source_filter'] = source_id + + result = client.rpc('match_code_examples', params).execute() + + return result.data + except Exception as e: + print(f"Error searching code examples: {e}") + return [] \ No newline at end of file diff --git a/servers/mcp-crawl4ai-rag/uv.lock b/servers/mcp-crawl4ai-rag/uv.lock new file mode 100644 index 0000000..8928d89 --- /dev/null +++ b/servers/mcp-crawl4ai-rag/uv.lock @@ -0,0 +1,2348 @@ +version = 1 +revision = 1 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "aiofiles" +version = "24.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, +] + +[[package]] +name = "aiohttp" +version = "3.11.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, 
+ { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/e7/fa1a8c00e2c54b05dc8cb5d1439f627f7c267874e3f7bb047146116020f9/aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a", size = 7678653 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/d2/5bc436f42bf4745c55f33e1e6a2d69e77075d3e768e3d1a34f96ee5298aa/aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2", size = 706671 }, + { url = "https://files.pythonhosted.org/packages/fe/d0/2dbabecc4e078c0474abb40536bbde717fb2e39962f41c5fc7a216b18ea7/aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508", size = 466169 }, + { url = "https://files.pythonhosted.org/packages/70/84/19edcf0b22933932faa6e0be0d933a27bd173da02dc125b7354dff4d8da4/aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e", size = 457554 }, + { url = "https://files.pythonhosted.org/packages/32/d0/e8d1f034ae5624a0f21e4fb3feff79342ce631f3a4d26bd3e58b31ef033b/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f", size = 1690154 }, + { url = "https://files.pythonhosted.org/packages/16/de/2f9dbe2ac6f38f8495562077131888e0d2897e3798a0ff3adda766b04a34/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f", size = 1733402 }, + { url = 
"https://files.pythonhosted.org/packages/e0/04/bd2870e1e9aef990d14b6df2a695f17807baf5c85a4c187a492bda569571/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec", size = 1783958 }, + { url = "https://files.pythonhosted.org/packages/23/06/4203ffa2beb5bedb07f0da0f79b7d9039d1c33f522e0d1a2d5b6218e6f2e/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6", size = 1695288 }, + { url = "https://files.pythonhosted.org/packages/30/b2/e2285dda065d9f29ab4b23d8bcc81eb881db512afb38a3f5247b191be36c/aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009", size = 1618871 }, + { url = "https://files.pythonhosted.org/packages/57/e0/88f2987885d4b646de2036f7296ebea9268fdbf27476da551c1a7c158bc0/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4", size = 1646262 }, + { url = "https://files.pythonhosted.org/packages/e0/19/4d2da508b4c587e7472a032290b2981f7caeca82b4354e19ab3df2f51d56/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9", size = 1677431 }, + { url = "https://files.pythonhosted.org/packages/eb/ae/047473ea50150a41440f3265f53db1738870b5a1e5406ece561ca61a3bf4/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb", size = 1637430 }, + { url = "https://files.pythonhosted.org/packages/11/32/c6d1e3748077ce7ee13745fae33e5cb1dac3e3b8f8787bf738a93c94a7d2/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda", size = 1703342 }, + { 
url = "https://files.pythonhosted.org/packages/c5/1d/a3b57bfdbe285f0d45572d6d8f534fd58761da3e9cbc3098372565005606/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1", size = 1740600 }, + { url = "https://files.pythonhosted.org/packages/a5/71/f9cd2fed33fa2b7ce4d412fb7876547abb821d5b5520787d159d0748321d/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea", size = 1695131 }, + { url = "https://files.pythonhosted.org/packages/97/97/d1248cd6d02b9de6aa514793d0dcb20099f0ec47ae71a933290116c070c5/aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8", size = 412442 }, + { url = "https://files.pythonhosted.org/packages/33/9a/e34e65506e06427b111e19218a99abf627638a9703f4b8bcc3e3021277ed/aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8", size = 439444 }, + { url = "https://files.pythonhosted.org/packages/0a/18/be8b5dd6b9cf1b2172301dbed28e8e5e878ee687c21947a6c81d6ceaa15d/aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811", size = 699833 }, + { url = "https://files.pythonhosted.org/packages/0d/84/ecdc68e293110e6f6f6d7b57786a77555a85f70edd2b180fb1fafaff361a/aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804", size = 462774 }, + { url = "https://files.pythonhosted.org/packages/d7/85/f07718cca55884dad83cc2433746384d267ee970e91f0dcc75c6d5544079/aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd", size = 454429 }, + { url = 
"https://files.pythonhosted.org/packages/82/02/7f669c3d4d39810db8842c4e572ce4fe3b3a9b82945fdd64affea4c6947e/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c", size = 1670283 }, + { url = "https://files.pythonhosted.org/packages/ec/79/b82a12f67009b377b6c07a26bdd1b81dab7409fc2902d669dbfa79e5ac02/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118", size = 1717231 }, + { url = "https://files.pythonhosted.org/packages/a6/38/d5a1f28c3904a840642b9a12c286ff41fc66dfa28b87e204b1f242dbd5e6/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1", size = 1769621 }, + { url = "https://files.pythonhosted.org/packages/53/2d/deb3749ba293e716b5714dda06e257f123c5b8679072346b1eb28b766a0b/aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000", size = 1678667 }, + { url = "https://files.pythonhosted.org/packages/b8/a8/04b6e11683a54e104b984bd19a9790eb1ae5f50968b601bb202d0406f0ff/aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137", size = 1601592 }, + { url = "https://files.pythonhosted.org/packages/5e/9d/c33305ae8370b789423623f0e073d09ac775cd9c831ac0f11338b81c16e0/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93", size = 1621679 }, + { url = "https://files.pythonhosted.org/packages/56/45/8e9a27fff0538173d47ba60362823358f7a5f1653c6c30c613469f94150e/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3", size = 1656878 }, + { url = "https://files.pythonhosted.org/packages/84/5b/8c5378f10d7a5a46b10cb9161a3aac3eeae6dba54ec0f627fc4ddc4f2e72/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8", size = 1620509 }, + { url = "https://files.pythonhosted.org/packages/9e/2f/99dee7bd91c62c5ff0aa3c55f4ae7e1bc99c6affef780d7777c60c5b3735/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2", size = 1680263 }, + { url = "https://files.pythonhosted.org/packages/03/0a/378745e4ff88acb83e2d5c884a4fe993a6e9f04600a4560ce0e9b19936e3/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261", size = 1715014 }, + { url = "https://files.pythonhosted.org/packages/f6/0b/b5524b3bb4b01e91bc4323aad0c2fcaebdf2f1b4d2eb22743948ba364958/aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7", size = 1666614 }, + { url = "https://files.pythonhosted.org/packages/c7/b7/3d7b036d5a4ed5a4c704e0754afe2eef24a824dfab08e6efbffb0f6dd36a/aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78", size = 411358 }, + { url = "https://files.pythonhosted.org/packages/1e/3c/143831b32cd23b5263a995b2a1794e10aa42f8a895aae5074c20fda36c07/aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01", size = 437658 }, +] + +[[package]] +name = "aiosignal" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 }, +] + +[[package]] +name = "aiosqlite" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = 
"typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285 }, +] + +[[package]] +name = "brotli" +version = "1.1.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693 }, + { url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489 }, + { url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081 }, + { url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244 }, + { url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505 }, + { url = "https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152 }, + { url = 
"https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252 }, + { url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955 }, + { url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304 }, + { url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452 }, + { url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751 }, + { url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757 }, + { url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146 }, + { url = 
"https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055 }, + { url = "https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102 }, + { url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029 }, + { url = "https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276 }, + { url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255 }, + { url = "https://files.pythonhosted.org/packages/0a/9f/fb37bb8ffc52a8da37b1c03c459a8cd55df7a57bdccd8831d500e994a0ca/Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5", size = 815681 }, + { url = "https://files.pythonhosted.org/packages/06/b3/dbd332a988586fefb0aa49c779f59f47cae76855c2d00f450364bb574cac/Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8", size = 422475 }, + { url = 
"https://files.pythonhosted.org/packages/bb/80/6aaddc2f63dbcf2d93c2d204e49c11a9ec93a8c7c63261e2b4bd35198283/Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f", size = 2906173 }, + { url = "https://files.pythonhosted.org/packages/ea/1d/e6ca79c96ff5b641df6097d299347507d39a9604bde8915e76bf026d6c77/Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648", size = 2943803 }, + { url = "https://files.pythonhosted.org/packages/ac/a3/d98d2472e0130b7dd3acdbb7f390d478123dbf62b7d32bda5c830a96116d/Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0", size = 2918946 }, + { url = "https://files.pythonhosted.org/packages/c4/a5/c69e6d272aee3e1423ed005d8915a7eaa0384c7de503da987f2d224d0721/Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089", size = 2845707 }, + { url = "https://files.pythonhosted.org/packages/58/9f/4149d38b52725afa39067350696c09526de0125ebfbaab5acc5af28b42ea/Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368", size = 2936231 }, + { url = "https://files.pythonhosted.org/packages/5a/5a/145de884285611838a16bebfdb060c231c52b8f84dfbe52b852a15780386/Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c", size = 2848157 }, + { url = "https://files.pythonhosted.org/packages/50/ae/408b6bfb8525dadebd3b3dd5b19d631da4f7d46420321db44cd99dcf2f2c/Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284", size = 3035122 
}, + { url = "https://files.pythonhosted.org/packages/af/85/a94e5cfaa0ca449d8f91c3d6f78313ebf919a0dbd55a100c711c6e9655bc/Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7", size = 2930206 }, + { url = "https://files.pythonhosted.org/packages/c2/f0/a61d9262cd01351df22e57ad7c34f66794709acab13f34be2675f45bf89d/Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0", size = 333804 }, + { url = "https://files.pythonhosted.org/packages/7e/c1/ec214e9c94000d1c1974ec67ced1c970c148aa6b8d8373066123fc3dbf06/Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b", size = 358517 }, +] + +[[package]] +name = "certifi" +version = "2025.4.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = 
"https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = 
"https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = 
"https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 }, + { url = 
"https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622 }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435 }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653 }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231 }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243 }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442 }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147 }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057 }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454 }, + { url = 
"https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174 }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166 }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064 }, + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641 }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = 
"sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "crawl4ai" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiofiles" }, + { name = "aiohttp" }, + { name = "aiosqlite" }, + { name = "beautifulsoup4" }, + { name = "brotli" }, + { name = "chardet" }, + { name = "click" }, + { name = "colorama" }, + { name = "cssselect" }, + { name = "fake-useragent" }, + { name = "httpx" }, + { name = "humanize" }, + { name = "litellm" }, + { name = "lxml" }, + { name = "nltk" }, + { name = "numpy" }, + { name = "pillow" }, + { name = "playwright" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "pyopenssl" }, + { name = "pyperclip" }, + { name = "python-dotenv" }, + { name = "rank-bm25" }, + { name = "requests" }, + { name = "rich" }, + { name = "snowballstemmer" }, + { name = "tf-playwright-stealth" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/9d/0f63b4f8ea487843b33a6da4b1ffff9e77dc4eee32cd25fb8bb52f3e6e04/crawl4ai-0.6.2.tar.gz", hash = "sha256:f52acee539500ec5fc8edbb7d3a3378a1b26f79017a52117bd10673a90a0f562", size = 291051 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/35/a604b5bc889c23f0960823092a75241fe81d1c7ae0762c5277583da08c5b/crawl4ai-0.6.2-py3-none-any.whl", hash = 
"sha256:f52ae16081afcd4b398c023fb9c9c8d31c592047bade4a97054a82c2271c54e6", size = 287248 }, +] + +[[package]] +name = "crawl4ai-mcp" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "crawl4ai" }, + { name = "dotenv" }, + { name = "mcp" }, + { name = "neo4j" }, + { name = "openai" }, + { name = "sentence-transformers" }, + { name = "supabase" }, +] + +[package.metadata] +requires-dist = [ + { name = "crawl4ai", specifier = "==0.6.2" }, + { name = "dotenv", specifier = "==0.9.9" }, + { name = "mcp", specifier = "==1.7.1" }, + { name = "neo4j", specifier = ">=5.28.1" }, + { name = "openai", specifier = "==1.71.0" }, + { name = "sentence-transformers", specifier = ">=4.1.0" }, + { name = "supabase", specifier = "==2.15.1" }, +] + +[[package]] +name = "cryptography" +version = "44.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281 }, + { url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305 }, + { url = 
"https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040 }, + { url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411 }, + { url = "https://files.pythonhosted.org/packages/42/b2/7d31f2af5591d217d71d37d044ef5412945a8a8e98d5a2a8ae4fd9cd4489/cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359", size = 3689263 }, + { url = "https://files.pythonhosted.org/packages/25/50/c0dfb9d87ae88ccc01aad8eb93e23cfbcea6a6a106a9b63a7b14c1f93c75/cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43", size = 4196198 }, + { url = "https://files.pythonhosted.org/packages/66/c9/55c6b8794a74da652690c898cb43906310a3e4e4f6ee0b5f8b3b3e70c441/cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01", size = 3966502 }, + { url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173 }, + { url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713 }, + { url = 
"https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064 }, + { url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887 }, + { url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737 }, + { url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501 }, + { url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307 }, + { url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876 }, + { url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127 }, + { url = 
"https://files.pythonhosted.org/packages/01/44/eb6522db7d9f84e8833ba3bf63313f8e257729cf3a8917379473fcfd6601/cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54", size = 3689164 }, + { url = "https://files.pythonhosted.org/packages/68/fb/d61a4defd0d6cee20b1b8a1ea8f5e25007e26aeb413ca53835f0cae2bcd1/cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93", size = 4198081 }, + { url = "https://files.pythonhosted.org/packages/1b/50/457f6911d36432a8811c3ab8bd5a6090e8d18ce655c22820994913dd06ea/cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c", size = 3967716 }, + { url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398 }, + { url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900 }, + { url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067 }, + { url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467 }, + { url = 
"https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375 }, +] + +[[package]] +name = "cssselect" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/0a/c3ea9573b1dc2e151abfe88c7fe0c26d1892fe6ed02d0cdb30f0d57029d5/cssselect-1.3.0.tar.gz", hash = "sha256:57f8a99424cfab289a1b6a816a43075a4b00948c86b4dcf3ef4ee7e15f7ab0c7", size = 42870 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/58/257350f7db99b4ae12b614a36256d9cc870d71d9e451e79c2dc3b23d7c3c/cssselect-1.3.0-py3-none-any.whl", hash = "sha256:56d1bf3e198080cc1667e137bc51de9cadfca259f03c2d4e09037b3e01e30f0d", size = 18786 }, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "dotenv" +version = "0.9.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dotenv" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/b7/545d2c10c1fc15e48653c91efde329a790f2eecfbbf2bd16003b5db2bab0/dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9", size = 1892 }, +] + +[[package]] +name = "fake-http-header" +version = "0.3.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/0b/2849c87d9f13766e29c0a2f4d31681aa72e035016b251ab19d99bde7b592/fake_http_header-0.3.5-py3-none-any.whl", hash = "sha256:cd05f4bebf1b7e38b5f5c03d7fb820c0c17e87d9614fbee0afa39c32c7a2ad3c", size = 14938 }, +] + +[[package]] +name = "fake-useragent" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/43/948d10bf42735709edb5ae51e23297d034086f17fc7279fef385a7acb473/fake_useragent-2.2.0.tar.gz", hash = "sha256:4e6ab6571e40cc086d788523cf9e018f618d07f9050f822ff409a4dfe17c16b2", size = 158898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/37/b3ea9cd5558ff4cb51957caca2193981c6b0ff30bd0d2630ac62505d99d0/fake_useragent-2.2.0-py3-none-any.whl", hash = "sha256:67f35ca4d847b0d298187443aaf020413746e56acd985a611908c73dba2daa24", size = 161695 }, +] + +[[package]] +name = "filelock" +version = "3.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = 
"sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 }, +] + +[[package]] +name = "frozenlist" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f4/d744cba2da59b5c1d88823cf9e8a6c74e4659e2b27604ed973be2a0bf5ab/frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68", size = 42831 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/8a/289b7d0de2fbac832ea80944d809759976f661557a38bb8e77db5d9f79b7/frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1", size = 160193 }, + { url = "https://files.pythonhosted.org/packages/19/80/2fd17d322aec7f430549f0669f599997174f93ee17929ea5b92781ec902c/frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29", size = 123831 }, + { url = "https://files.pythonhosted.org/packages/99/06/f5812da431273f78c6543e0b2f7de67dfd65eb0a433978b2c9c63d2205e4/frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25", size = 121862 }, + { url = "https://files.pythonhosted.org/packages/d0/31/9e61c6b5fc493cf24d54881731204d27105234d09878be1a5983182cc4a5/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576", size = 316361 }, + { url = 
"https://files.pythonhosted.org/packages/9d/55/22ca9362d4f0222324981470fd50192be200154d51509ee6eb9baa148e96/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8", size = 307115 }, + { url = "https://files.pythonhosted.org/packages/ae/39/4fff42920a57794881e7bb3898dc7f5f539261711ea411b43bba3cde8b79/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9", size = 322505 }, + { url = "https://files.pythonhosted.org/packages/55/f2/88c41f374c1e4cf0092a5459e5f3d6a1e17ed274c98087a76487783df90c/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e", size = 322666 }, + { url = "https://files.pythonhosted.org/packages/75/51/034eeb75afdf3fd03997856195b500722c0b1a50716664cde64e28299c4b/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590", size = 302119 }, + { url = "https://files.pythonhosted.org/packages/2b/a6/564ecde55ee633270a793999ef4fd1d2c2b32b5a7eec903b1012cb7c5143/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103", size = 316226 }, + { url = "https://files.pythonhosted.org/packages/f1/c8/6c0682c32377f402b8a6174fb16378b683cf6379ab4d2827c580892ab3c7/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c", size = 312788 }, + { url = 
"https://files.pythonhosted.org/packages/b6/b8/10fbec38f82c5d163ca1750bfff4ede69713badf236a016781cf1f10a0f0/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821", size = 325914 }, + { url = "https://files.pythonhosted.org/packages/62/ca/2bf4f3a1bd40cdedd301e6ecfdbb291080d5afc5f9ce350c0739f773d6b9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70", size = 305283 }, + { url = "https://files.pythonhosted.org/packages/09/64/20cc13ccf94abc2a1f482f74ad210703dc78a590d0b805af1c9aa67f76f9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f", size = 319264 }, + { url = "https://files.pythonhosted.org/packages/20/ff/86c6a2bbe98cfc231519f5e6d712a0898488ceac804a917ce014f32e68f6/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046", size = 326482 }, + { url = "https://files.pythonhosted.org/packages/2f/da/8e381f66367d79adca245d1d71527aac774e30e291d41ef161ce2d80c38e/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770", size = 318248 }, + { url = "https://files.pythonhosted.org/packages/39/24/1a1976563fb476ab6f0fa9fefaac7616a4361dbe0461324f9fd7bf425dbe/frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc", size = 115161 }, + { url = "https://files.pythonhosted.org/packages/80/2e/fb4ed62a65f8cd66044706b1013f0010930d8cbb0729a2219561ea075434/frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878", size = 120548 }, + { url = 
"https://files.pythonhosted.org/packages/6f/e5/04c7090c514d96ca00887932417f04343ab94904a56ab7f57861bf63652d/frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e", size = 158182 }, + { url = "https://files.pythonhosted.org/packages/e9/8f/60d0555c61eec855783a6356268314d204137f5e0c53b59ae2fc28938c99/frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117", size = 122838 }, + { url = "https://files.pythonhosted.org/packages/5a/a7/d0ec890e3665b4b3b7c05dc80e477ed8dc2e2e77719368e78e2cd9fec9c8/frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4", size = 120980 }, + { url = "https://files.pythonhosted.org/packages/cc/19/9b355a5e7a8eba903a008579964192c3e427444752f20b2144b10bb336df/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3", size = 305463 }, + { url = "https://files.pythonhosted.org/packages/9c/8d/5b4c758c2550131d66935ef2fa700ada2461c08866aef4229ae1554b93ca/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1", size = 297985 }, + { url = "https://files.pythonhosted.org/packages/48/2c/537ec09e032b5865715726b2d1d9813e6589b571d34d01550c7aeaad7e53/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c", size = 311188 }, + { url = "https://files.pythonhosted.org/packages/31/2f/1aa74b33f74d54817055de9a4961eff798f066cdc6f67591905d4fc82a84/frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45", size 
= 311874 }, + { url = "https://files.pythonhosted.org/packages/bf/f0/cfec18838f13ebf4b37cfebc8649db5ea71a1b25dacd691444a10729776c/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f", size = 291897 }, + { url = "https://files.pythonhosted.org/packages/ea/a5/deb39325cbbea6cd0a46db8ccd76150ae2fcbe60d63243d9df4a0b8c3205/frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85", size = 305799 }, + { url = "https://files.pythonhosted.org/packages/78/22/6ddec55c5243a59f605e4280f10cee8c95a449f81e40117163383829c241/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8", size = 302804 }, + { url = "https://files.pythonhosted.org/packages/5d/b7/d9ca9bab87f28855063c4d202936800219e39db9e46f9fb004d521152623/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f", size = 316404 }, + { url = "https://files.pythonhosted.org/packages/a6/3a/1255305db7874d0b9eddb4fe4a27469e1fb63720f1fc6d325a5118492d18/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f", size = 295572 }, + { url = "https://files.pythonhosted.org/packages/2a/f2/8d38eeee39a0e3a91b75867cc102159ecccf441deb6ddf67be96d3410b84/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6", size = 307601 }, + { url = "https://files.pythonhosted.org/packages/38/04/80ec8e6b92f61ef085422d7b196822820404f940950dde5b2e367bede8bc/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188", size = 314232 }, + { url = "https://files.pythonhosted.org/packages/3a/58/93b41fb23e75f38f453ae92a2f987274c64637c450285577bd81c599b715/frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e", size = 308187 }, + { url = "https://files.pythonhosted.org/packages/6a/a2/e64df5c5aa36ab3dee5a40d254f3e471bb0603c225f81664267281c46a2d/frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4", size = 114772 }, + { url = "https://files.pythonhosted.org/packages/a0/77/fead27441e749b2d574bb73d693530d59d520d4b9e9679b8e3cb779d37f2/frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd", size = 119847 }, + { url = "https://files.pythonhosted.org/packages/df/bd/cc6d934991c1e5d9cafda83dfdc52f987c7b28343686aef2e58a9cf89f20/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64", size = 174937 }, + { url = "https://files.pythonhosted.org/packages/f2/a2/daf945f335abdbfdd5993e9dc348ef4507436936ab3c26d7cfe72f4843bf/frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91", size = 136029 }, + { url = "https://files.pythonhosted.org/packages/51/65/4c3145f237a31247c3429e1c94c384d053f69b52110a0d04bfc8afc55fb2/frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd", size = 134831 }, + { url = "https://files.pythonhosted.org/packages/77/38/03d316507d8dea84dfb99bdd515ea245628af964b2bf57759e3c9205cc5e/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2", size = 
392981 }, + { url = "https://files.pythonhosted.org/packages/37/02/46285ef9828f318ba400a51d5bb616ded38db8466836a9cfa39f3903260b/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506", size = 371999 }, + { url = "https://files.pythonhosted.org/packages/0d/64/1212fea37a112c3c5c05bfb5f0a81af4836ce349e69be75af93f99644da9/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0", size = 392200 }, + { url = "https://files.pythonhosted.org/packages/81/ce/9a6ea1763e3366e44a5208f76bf37c76c5da570772375e4d0be85180e588/frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0", size = 390134 }, + { url = "https://files.pythonhosted.org/packages/bc/36/939738b0b495b2c6d0c39ba51563e453232813042a8d908b8f9544296c29/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e", size = 365208 }, + { url = "https://files.pythonhosted.org/packages/b4/8b/939e62e93c63409949c25220d1ba8e88e3960f8ef6a8d9ede8f94b459d27/frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c", size = 385548 }, + { url = "https://files.pythonhosted.org/packages/62/38/22d2873c90102e06a7c5a3a5b82ca47e393c6079413e8a75c72bff067fa8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b", size = 391123 }, + { url = 
"https://files.pythonhosted.org/packages/44/78/63aaaf533ee0701549500f6d819be092c6065cb5c577edb70c09df74d5d0/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad", size = 394199 }, + { url = "https://files.pythonhosted.org/packages/54/45/71a6b48981d429e8fbcc08454dc99c4c2639865a646d549812883e9c9dd3/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215", size = 373854 }, + { url = "https://files.pythonhosted.org/packages/3f/f3/dbf2a5e11736ea81a66e37288bf9f881143a7822b288a992579ba1b4204d/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2", size = 395412 }, + { url = "https://files.pythonhosted.org/packages/b3/f1/c63166806b331f05104d8ea385c4acd511598568b1f3e4e8297ca54f2676/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911", size = 394936 }, + { url = "https://files.pythonhosted.org/packages/ef/ea/4f3e69e179a430473eaa1a75ff986526571215fefc6b9281cdc1f09a4eb8/frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497", size = 391459 }, + { url = "https://files.pythonhosted.org/packages/d3/c3/0fc2c97dea550df9afd072a37c1e95421652e3206bbeaa02378b24c2b480/frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f", size = 128797 }, + { url = "https://files.pythonhosted.org/packages/ae/f5/79c9320c5656b1965634fe4be9c82b12a3305bdbc58ad9cb941131107b20/frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348", size = 134709 }, + { url = 
"https://files.pythonhosted.org/packages/71/3e/b04a0adda73bd52b390d730071c0d577073d3d26740ee1bad25c3ad0f37b/frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191", size = 12404 }, +] + +[[package]] +name = "fsspec" +version = "2025.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/d8/8425e6ba5fcec61a1d16e41b1b71d2bf9344f1fe48012c2b48b9620feae5/fsspec-2025.3.2.tar.gz", hash = "sha256:e52c77ef398680bbd6a98c0e628fbc469491282981209907bbc8aea76a04fdc6", size = 299281 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/4b/e0cfc1a6f17e990f3e64b7d941ddc4acdc7b19d6edd51abf495f32b1a9e4/fsspec-2025.3.2-py3-none-any.whl", hash = "sha256:2daf8dc3d1dfa65b6aa37748d112773a7a08416f6c70d96b264c96476ecaf711", size = 194435 }, +] + +[[package]] +name = "gotrue" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "pytest-mock" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/97/577c6d67f2d3687199ba7c5628af65108f346a15877c93831081ab67a341/gotrue-2.12.0.tar.gz", hash = "sha256:b9ea164ee52964d8364c550cde16dd0e9576241a4cffeaa52eca339f61d1d14b", size = 37883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/5c/fe0dd370294c782fc1f627bb7e3eedd87c3d4d7f8d2b39fe8dd63c3096a8/gotrue-2.12.0-py3-none-any.whl", hash = "sha256:de94928eebb42d7d9672dbe4fbd0b51140a45051a31626a06dad2ad44a9a976a", size = 43649 }, +] + +[[package]] +name = "greenlet" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/74/907bb43af91782e0366b0960af62a8ce1f9398e4291cac7beaeffbee0c04/greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7", size = 184475 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f0/d1/e4777b188a04726f6cf69047830d37365b9191017f54caf2f7af336a6f18/greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea", size = 270381 }, + { url = "https://files.pythonhosted.org/packages/59/e7/b5b738f5679247ddfcf2179c38945519668dced60c3164c20d55c1a7bb4a/greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8", size = 637195 }, + { url = "https://files.pythonhosted.org/packages/6c/9f/57968c88a5f6bc371364baf983a2e5549cca8f503bfef591b6dd81332cbc/greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840", size = 651381 }, + { url = "https://files.pythonhosted.org/packages/40/81/1533c9a458e9f2ebccb3ae22f1463b2093b0eb448a88aac36182f1c2cd3d/greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9", size = 646110 }, + { url = "https://files.pythonhosted.org/packages/06/66/25f7e4b1468ebe4a520757f2e41c2a36a2f49a12e963431b82e9f98df2a0/greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12", size = 648070 }, + { url = "https://files.pythonhosted.org/packages/d7/4c/49d366565c4c4d29e6f666287b9e2f471a66c3a3d8d5066692e347f09e27/greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22", size = 603816 }, + { url = "https://files.pythonhosted.org/packages/04/15/1612bb61506f44b6b8b6bebb6488702b1fe1432547e95dda57874303a1f5/greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1", size = 
1119572 }, + { url = "https://files.pythonhosted.org/packages/cc/2f/002b99dacd1610e825876f5cbbe7f86740aa2a6b76816e5eca41c8457e85/greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145", size = 1147442 }, + { url = "https://files.pythonhosted.org/packages/c0/ba/82a2c3b9868644ee6011da742156247070f30e952f4d33f33857458450f2/greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d", size = 296207 }, + { url = "https://files.pythonhosted.org/packages/77/2a/581b3808afec55b2db838742527c40b4ce68b9b64feedff0fd0123f4b19a/greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac", size = 269119 }, + { url = "https://files.pythonhosted.org/packages/b0/f3/1c4e27fbdc84e13f05afc2baf605e704668ffa26e73a43eca93e1120813e/greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437", size = 637314 }, + { url = "https://files.pythonhosted.org/packages/fc/1a/9fc43cb0044f425f7252da9847893b6de4e3b20c0a748bce7ab3f063d5bc/greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a", size = 651421 }, + { url = "https://files.pythonhosted.org/packages/8a/65/d47c03cdc62c6680206b7420c4a98363ee997e87a5e9da1e83bd7eeb57a8/greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c", size = 645789 }, + { url = "https://files.pythonhosted.org/packages/2f/40/0faf8bee1b106c241780f377b9951dd4564ef0972de1942ef74687aa6bba/greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982", size = 648262 }, + { url = 
"https://files.pythonhosted.org/packages/e0/a8/73305f713183c2cb08f3ddd32eaa20a6854ba9c37061d682192db9b021c3/greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07", size = 606770 }, + { url = "https://files.pythonhosted.org/packages/c3/05/7d726e1fb7f8a6ac55ff212a54238a36c57db83446523c763e20cd30b837/greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95", size = 1117960 }, + { url = "https://files.pythonhosted.org/packages/bf/9f/2b6cb1bd9f1537e7b08c08705c4a1d7bd4f64489c67d102225c4fd262bda/greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123", size = 1145500 }, + { url = "https://files.pythonhosted.org/packages/e4/f6/339c6e707062319546598eb9827d3ca8942a3eccc610d4a54c1da7b62527/greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495", size = 295994 }, + { url = "https://files.pythonhosted.org/packages/f1/72/2a251d74a596af7bb1717e891ad4275a3fd5ac06152319d7ad8c77f876af/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526", size = 629889 }, + { url = "https://files.pythonhosted.org/packages/29/2e/d7ed8bf97641bf704b6a43907c0e082cdf44d5bc026eb8e1b79283e7a719/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5", size = 635261 }, + { url = "https://files.pythonhosted.org/packages/1e/75/802aa27848a6fcb5e566f69c64534f572e310f0f12d41e9201a81e741551/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32", size = 632523 }, + { url = 
"https://files.pythonhosted.org/packages/56/09/f7c1c3bab9b4c589ad356503dd71be00935e9c4db4db516ed88fc80f1187/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc", size = 628816 }, + { url = "https://files.pythonhosted.org/packages/79/e0/1bb90d30b5450eac2dffeaac6b692857c4bd642c21883b79faa8fa056cf2/greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb", size = 593687 }, + { url = "https://files.pythonhosted.org/packages/c5/b5/adbe03c8b4c178add20cc716021183ae6b0326d56ba8793d7828c94286f6/greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8", size = 1105754 }, + { url = "https://files.pythonhosted.org/packages/39/93/84582d7ef38dec009543ccadec6ab41079a6cbc2b8c0566bcd07bf1aaf6c/greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d", size = 1125160 }, + { url = "https://files.pythonhosted.org/packages/01/e6/f9d759788518a6248684e3afeb3691f3ab0276d769b6217a1533362298c8/greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189", size = 269897 }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 
}, +] + +[[package]] +name = "h2" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, +] + +[[package]] +name = "httpx" 
+version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, +] + +[[package]] +name = "huggingface-hub" +version = "0.30.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/22/8eb91736b1dcb83d879bd49050a09df29a57cc5cd9f38e48a4b1c45ee890/huggingface_hub-0.30.2.tar.gz", hash = "sha256:9a7897c5b6fd9dad3168a794a8998d6378210f5b9688d0dfc180b1a228dc2466", size = 400868 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/93/27/1fb384a841e9661faad1c31cbfa62864f59632e876df5d795234da51c395/huggingface_hub-0.30.2-py3-none-any.whl", hash = "sha256:68ff05969927058cfa41df4f2155d4bb48f5f54f719dd0390103eefa9b191e28", size = 481433 }, +] + +[[package]] +name = "humanize" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/22/d1/bbc4d251187a43f69844f7fd8941426549bbe4723e8ff0a7441796b0789f/humanize-4.12.3.tar.gz", hash = "sha256:8430be3a615106fdfceb0b2c1b41c4c98c6b0fc5cc59663a5539b111dd325fb0", size = 80514 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/1e/62a2ec3104394a2975a2629eec89276ede9dbe717092f6966fcf963e1bf0/humanize-4.12.3-py3-none-any.whl", hash = "sha256:2cbf6370af06568fa6d2da77c86edb7886f3160ecd19ee1ffef07979efc597f6", size = 128487 }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971 }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = 
"sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, +] + +[[package]] +name = "jiter" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/c2/e4562507f52f0af7036da125bb699602ead37a2332af0788f8e0a3417f36/jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893", size = 162604 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/d7/c55086103d6f29b694ec79156242304adf521577530d9031317ce5338c59/jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11", size = 309203 }, + { url = "https://files.pythonhosted.org/packages/b0/01/f775dfee50beb420adfd6baf58d1c4d437de41c9b666ddf127c065e5a488/jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e", size = 319678 }, + { url = "https://files.pythonhosted.org/packages/ab/b8/09b73a793714726893e5d46d5c534a63709261af3d24444ad07885ce87cb/jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2", size = 341816 }, + { url = "https://files.pythonhosted.org/packages/35/6f/b8f89ec5398b2b0d344257138182cc090302854ed63ed9c9051e9c673441/jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75", size = 364152 }, + { url = "https://files.pythonhosted.org/packages/9b/ca/978cc3183113b8e4484cc7e210a9ad3c6614396e7abd5407ea8aa1458eef/jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d", size = 406991 }, + { url = 
"https://files.pythonhosted.org/packages/13/3a/72861883e11a36d6aa314b4922125f6ae90bdccc225cd96d24cc78a66385/jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42", size = 395824 }, + { url = "https://files.pythonhosted.org/packages/87/67/22728a86ef53589c3720225778f7c5fdb617080e3deaed58b04789418212/jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc", size = 351318 }, + { url = "https://files.pythonhosted.org/packages/69/b9/f39728e2e2007276806d7a6609cda7fac44ffa28ca0d02c49a4f397cc0d9/jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc", size = 384591 }, + { url = "https://files.pythonhosted.org/packages/eb/8f/8a708bc7fd87b8a5d861f1c118a995eccbe6d672fe10c9753e67362d0dd0/jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e", size = 520746 }, + { url = "https://files.pythonhosted.org/packages/95/1e/65680c7488bd2365dbd2980adaf63c562d3d41d3faac192ebc7ef5b4ae25/jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d", size = 512754 }, + { url = "https://files.pythonhosted.org/packages/78/f3/fdc43547a9ee6e93c837685da704fb6da7dba311fc022e2766d5277dfde5/jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06", size = 207075 }, + { url = "https://files.pythonhosted.org/packages/cd/9d/742b289016d155f49028fe1bfbeb935c9bf0ffeefdf77daf4a63a42bb72b/jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0", size = 207999 }, + { url = 
"https://files.pythonhosted.org/packages/e7/1b/4cd165c362e8f2f520fdb43245e2b414f42a255921248b4f8b9c8d871ff1/jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7", size = 308197 }, + { url = "https://files.pythonhosted.org/packages/13/aa/7a890dfe29c84c9a82064a9fe36079c7c0309c91b70c380dc138f9bea44a/jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b", size = 318160 }, + { url = "https://files.pythonhosted.org/packages/6a/38/5888b43fc01102f733f085673c4f0be5a298f69808ec63de55051754e390/jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69", size = 341259 }, + { url = "https://files.pythonhosted.org/packages/3d/5e/bbdbb63305bcc01006de683b6228cd061458b9b7bb9b8d9bc348a58e5dc2/jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103", size = 363730 }, + { url = "https://files.pythonhosted.org/packages/75/85/53a3edc616992fe4af6814c25f91ee3b1e22f7678e979b6ea82d3bc0667e/jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635", size = 405126 }, + { url = "https://files.pythonhosted.org/packages/ae/b3/1ee26b12b2693bd3f0b71d3188e4e5d817b12e3c630a09e099e0a89e28fa/jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4", size = 393668 }, + { url = "https://files.pythonhosted.org/packages/11/87/e084ce261950c1861773ab534d49127d1517b629478304d328493f980791/jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d", size = 352350 }, + { url = 
"https://files.pythonhosted.org/packages/f0/06/7dca84b04987e9df563610aa0bc154ea176e50358af532ab40ffb87434df/jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3", size = 384204 }, + { url = "https://files.pythonhosted.org/packages/16/2f/82e1c6020db72f397dd070eec0c85ebc4df7c88967bc86d3ce9864148f28/jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5", size = 520322 }, + { url = "https://files.pythonhosted.org/packages/36/fd/4f0cd3abe83ce208991ca61e7e5df915aa35b67f1c0633eb7cf2f2e88ec7/jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d", size = 512184 }, + { url = "https://files.pythonhosted.org/packages/a0/3c/8a56f6d547731a0b4410a2d9d16bf39c861046f91f57c98f7cab3d2aa9ce/jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53", size = 206504 }, + { url = "https://files.pythonhosted.org/packages/f4/1c/0c996fd90639acda75ed7fa698ee5fd7d80243057185dc2f63d4c1c9f6b9/jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7", size = 204943 }, + { url = "https://files.pythonhosted.org/packages/78/0f/77a63ca7aa5fed9a1b9135af57e190d905bcd3702b36aca46a01090d39ad/jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001", size = 317281 }, + { url = "https://files.pythonhosted.org/packages/f9/39/a3a1571712c2bf6ec4c657f0d66da114a63a2e32b7e4eb8e0b83295ee034/jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a", size = 350273 }, + { url = 
"https://files.pythonhosted.org/packages/ee/47/3729f00f35a696e68da15d64eb9283c330e776f3b5789bac7f2c0c4df209/jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf", size = 206867 }, +] + +[[package]] +name = "joblib" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/64/33/60135848598c076ce4b231e1b1895170f45fbcaeaa2c9d5e38b04db70c35/joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e", size = 2116621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/29/df4b9b42f2be0b623cbd5e2140cafcaa2bef0759a00b7b70104dcfe2fb51/joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6", size = 301817 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = 
"sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437 }, +] + +[[package]] +name = "litellm" +version = "1.67.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "click" }, + { name = "httpx" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "tiktoken" }, + { name = "tokenizers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/26/a07aa0c7a622e89b34dd26ae4c17fda398e1664fefa71379015656744546/litellm-1.67.6.tar.gz", hash = "sha256:8cd23db10463a02bb5a64fb69b243d97879ecf4075fe38740f8c4b93f3f770a6", size = 7308919 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/bf/27fe677b5c6631d40d14620983e521239e7e1360cb7c8ab4111f35971f56/litellm-1.67.6-py3-none-any.whl", hash = "sha256:3c3fb31e9e6e51d8d0eb2da4df1538a3924c2d8e1201775358678f79b1625966", size = 7677070 }, +] + +[[package]] +name = "lxml" +version = "5.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392 }, + { url = 
"https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103 }, + { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224 }, + { url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913 }, + { url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441 }, + { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580 }, + { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size 
= 4759493 }, + { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679 }, + { url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691 }, + { url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075 }, + { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680 }, + { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253 }, + { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651 }, + { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315 }, + { url = 
"https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149 }, + { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095 }, + { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086 }, + { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613 }, + { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008 }, + { url = "https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915 }, + { url = "https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890 }, + { url = 
"https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644 }, + { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817 }, + { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916 }, + { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274 }, + { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757 }, + { url = "https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028 }, + { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487 }, + { url = 
"https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688 }, + { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043 }, + { url = "https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569 }, + { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270 }, + { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] 
+name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url 
= "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = 
"https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, 
+ { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "mcp" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/ae/588691c45b38f4fbac07fa3d6d50cea44cc6b35d16ddfdf26e17a0467ab2/mcp-1.7.1.tar.gz", hash = "sha256:eb4f1f53bd717f75dda8a1416e00804b831a8f3c331e23447a03b78f04b43a6e", size = 230903 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/79/fe0e20c3358997a80911af51bad927b5ea2f343ef95ab092b19c9cc48b59/mcp-1.7.1-py3-none-any.whl", hash = "sha256:f7e6108977db6d03418495426c7ace085ba2341b75197f8727f96f9cfd30057a", size = 100365 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = 
"sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, +] + +[[package]] +name = "multidict" +version = "6.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/2c/e367dfb4c6538614a0c9453e510d75d66099edf1c4e69da1b5ce691a1931/multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec", size = 89372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/bb/3abdaf8fe40e9226ce8a2ba5ecf332461f7beec478a455d6587159f1bf92/multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676", size = 64019 }, + { url = "https://files.pythonhosted.org/packages/7e/b5/1b2e8de8217d2e89db156625aa0fe4a6faad98972bfe07a7b8c10ef5dd6b/multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1", size = 37925 }, + { url = 
"https://files.pythonhosted.org/packages/b4/e2/3ca91c112644a395c8eae017144c907d173ea910c913ff8b62549dcf0bbf/multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a", size = 37008 }, + { url = "https://files.pythonhosted.org/packages/60/23/79bc78146c7ac8d1ac766b2770ca2e07c2816058b8a3d5da6caed8148637/multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054", size = 224374 }, + { url = "https://files.pythonhosted.org/packages/86/35/77950ed9ebd09136003a85c1926ba42001ca5be14feb49710e4334ee199b/multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc", size = 230869 }, + { url = "https://files.pythonhosted.org/packages/49/97/2a33c6e7d90bc116c636c14b2abab93d6521c0c052d24bfcc231cbf7f0e7/multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07", size = 231949 }, + { url = "https://files.pythonhosted.org/packages/56/ce/e9b5d9fcf854f61d6686ada7ff64893a7a5523b2a07da6f1265eaaea5151/multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde", size = 231032 }, + { url = "https://files.pythonhosted.org/packages/f0/ac/7ced59dcdfeddd03e601edb05adff0c66d81ed4a5160c443e44f2379eef0/multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c", size = 223517 }, + { url = "https://files.pythonhosted.org/packages/db/e6/325ed9055ae4e085315193a1b58bdb4d7fc38ffcc1f4975cfca97d015e17/multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae", size = 216291 }, + { url = "https://files.pythonhosted.org/packages/fa/84/eeee6d477dd9dcb7691c3bb9d08df56017f5dd15c730bcc9383dcf201cf4/multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3", size = 228982 }, + { url = "https://files.pythonhosted.org/packages/82/94/4d1f3e74e7acf8b0c85db350e012dcc61701cd6668bc2440bb1ecb423c90/multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507", size = 226823 }, + { url = "https://files.pythonhosted.org/packages/09/f0/1e54b95bda7cd01080e5732f9abb7b76ab5cc795b66605877caeb2197476/multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427", size = 222714 }, + { url = "https://files.pythonhosted.org/packages/e7/a2/f6cbca875195bd65a3e53b37ab46486f3cc125bdeab20eefe5042afa31fb/multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731", size = 233739 }, + { url = "https://files.pythonhosted.org/packages/79/68/9891f4d2b8569554723ddd6154375295f789dc65809826c6fb96a06314fd/multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713", size = 230809 }, + { url = "https://files.pythonhosted.org/packages/e6/72/a7be29ba1e87e4fc5ceb44dabc7940b8005fd2436a332a23547709315f70/multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a", size = 226934 }, + { url = "https://files.pythonhosted.org/packages/12/c1/259386a9ad6840ff7afc686da96808b503d152ac4feb3a96c651dc4f5abf/multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124", size = 35242 }, + { url = 
"https://files.pythonhosted.org/packages/06/24/c8fdff4f924d37225dc0c56a28b1dca10728fc2233065fafeb27b4b125be/multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db", size = 38635 }, + { url = "https://files.pythonhosted.org/packages/6c/4b/86fd786d03915c6f49998cf10cd5fe6b6ac9e9a071cb40885d2e080fb90d/multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474", size = 63831 }, + { url = "https://files.pythonhosted.org/packages/45/05/9b51fdf7aef2563340a93be0a663acba2c428c4daeaf3960d92d53a4a930/multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd", size = 37888 }, + { url = "https://files.pythonhosted.org/packages/0b/43/53fc25394386c911822419b522181227ca450cf57fea76e6188772a1bd91/multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b", size = 36852 }, + { url = "https://files.pythonhosted.org/packages/8a/68/7b99c751e822467c94a235b810a2fd4047d4ecb91caef6b5c60116991c4b/multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3", size = 223644 }, + { url = "https://files.pythonhosted.org/packages/80/1b/d458d791e4dd0f7e92596667784fbf99e5c8ba040affe1ca04f06b93ae92/multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac", size = 230446 }, + { url = "https://files.pythonhosted.org/packages/e2/46/9793378d988905491a7806d8987862dc5a0bae8a622dd896c4008c7b226b/multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790", size = 231070 }, + { url = 
"https://files.pythonhosted.org/packages/a7/b8/b127d3e1f8dd2a5bf286b47b24567ae6363017292dc6dec44656e6246498/multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb", size = 229956 }, + { url = "https://files.pythonhosted.org/packages/0c/93/f70a4c35b103fcfe1443059a2bb7f66e5c35f2aea7804105ff214f566009/multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0", size = 222599 }, + { url = "https://files.pythonhosted.org/packages/63/8c/e28e0eb2fe34921d6aa32bfc4ac75b09570b4d6818cc95d25499fe08dc1d/multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9", size = 216136 }, + { url = "https://files.pythonhosted.org/packages/72/f5/fbc81f866585b05f89f99d108be5d6ad170e3b6c4d0723d1a2f6ba5fa918/multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8", size = 228139 }, + { url = "https://files.pythonhosted.org/packages/bb/ba/7d196bad6b85af2307d81f6979c36ed9665f49626f66d883d6c64d156f78/multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1", size = 226251 }, + { url = "https://files.pythonhosted.org/packages/cc/e2/fae46a370dce79d08b672422a33df721ec8b80105e0ea8d87215ff6b090d/multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817", size = 221868 }, + { url = "https://files.pythonhosted.org/packages/26/20/bbc9a3dec19d5492f54a167f08546656e7aef75d181d3d82541463450e88/multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d", size = 233106 }, + { url = 
"https://files.pythonhosted.org/packages/ee/8d/f30ae8f5ff7a2461177f4d8eb0d8f69f27fb6cfe276b54ec4fd5a282d918/multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9", size = 230163 }, + { url = "https://files.pythonhosted.org/packages/15/e9/2833f3c218d3c2179f3093f766940ded6b81a49d2e2f9c46ab240d23dfec/multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8", size = 225906 }, + { url = "https://files.pythonhosted.org/packages/f1/31/6edab296ac369fd286b845fa5dd4c409e63bc4655ed8c9510fcb477e9ae9/multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3", size = 35238 }, + { url = "https://files.pythonhosted.org/packages/23/57/2c0167a1bffa30d9a1383c3dab99d8caae985defc8636934b5668830d2ef/multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5", size = 38799 }, + { url = "https://files.pythonhosted.org/packages/c9/13/2ead63b9ab0d2b3080819268acb297bd66e238070aa8d42af12b08cbee1c/multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6", size = 68642 }, + { url = "https://files.pythonhosted.org/packages/85/45/f1a751e1eede30c23951e2ae274ce8fad738e8a3d5714be73e0a41b27b16/multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c", size = 40028 }, + { url = "https://files.pythonhosted.org/packages/a7/29/fcc53e886a2cc5595cc4560df333cb9630257bda65003a7eb4e4e0d8f9c1/multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756", size = 39424 }, + { url = 
"https://files.pythonhosted.org/packages/f6/f0/056c81119d8b88703971f937b371795cab1407cd3c751482de5bfe1a04a9/multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375", size = 226178 }, + { url = "https://files.pythonhosted.org/packages/a3/79/3b7e5fea0aa80583d3a69c9d98b7913dfd4fbc341fb10bb2fb48d35a9c21/multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be", size = 222617 }, + { url = "https://files.pythonhosted.org/packages/06/db/3ed012b163e376fc461e1d6a67de69b408339bc31dc83d39ae9ec3bf9578/multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea", size = 227919 }, + { url = "https://files.pythonhosted.org/packages/b1/db/0433c104bca380989bc04d3b841fc83e95ce0c89f680e9ea4251118b52b6/multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8", size = 226097 }, + { url = "https://files.pythonhosted.org/packages/c2/95/910db2618175724dd254b7ae635b6cd8d2947a8b76b0376de7b96d814dab/multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02", size = 220706 }, + { url = "https://files.pythonhosted.org/packages/d1/af/aa176c6f5f1d901aac957d5258d5e22897fe13948d1e69063ae3d5d0ca01/multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124", size = 211728 }, + { url = "https://files.pythonhosted.org/packages/e7/42/d51cc5fc1527c3717d7f85137d6c79bb7a93cd214c26f1fc57523774dbb5/multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44", size = 226276 }, + { url = "https://files.pythonhosted.org/packages/28/6b/d836dea45e0b8432343ba4acf9a8ecaa245da4c0960fb7ab45088a5e568a/multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b", size = 212069 }, + { url = "https://files.pythonhosted.org/packages/55/34/0ee1a7adb3560e18ee9289c6e5f7db54edc312b13e5c8263e88ea373d12c/multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504", size = 217858 }, + { url = "https://files.pythonhosted.org/packages/04/08/586d652c2f5acefe0cf4e658eedb4d71d4ba6dfd4f189bd81b400fc1bc6b/multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf", size = 226988 }, + { url = "https://files.pythonhosted.org/packages/82/e3/cc59c7e2bc49d7f906fb4ffb6d9c3a3cf21b9f2dd9c96d05bef89c2b1fd1/multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4", size = 220435 }, + { url = "https://files.pythonhosted.org/packages/e0/32/5c3a556118aca9981d883f38c4b1bfae646f3627157f70f4068e5a648955/multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4", size = 221494 }, + { url = "https://files.pythonhosted.org/packages/b9/3b/1599631f59024b75c4d6e3069f4502409970a336647502aaf6b62fb7ac98/multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5", size = 41775 }, + { url = "https://files.pythonhosted.org/packages/e8/4e/09301668d675d02ca8e8e1a3e6be046619e30403f5ada2ed5b080ae28d02/multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208", size = 45946 }, + { url = 
"https://files.pythonhosted.org/packages/96/10/7d526c8974f017f1e7ca584c71ee62a638e9334d8d33f27d7cdfc9ae79e4/multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9", size = 10400 }, +] + +[[package]] +name = "neo4j" +version = "5.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/20/733dac16f7cedc80b23093415822c9763302519cba0e7c8bcdb5c01fc512/neo4j-5.28.1.tar.gz", hash = "sha256:ae8e37a1d895099062c75bc359b2cce62099baac7be768d0eba7180c1298e214", size = 231094 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/57/94225fe5e9dabdc0ff60c88cbfcedf11277f4b34e7ab1373d3e62dbdd207/neo4j-5.28.1-py3-none-any.whl", hash = "sha256:6755ef9e5f4e14b403aef1138fb6315b120631a0075c138b5ddb2a06b87b09fd", size = 312258 }, +] + +[[package]] +name = "networkx" +version = "3.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406 }, +] + +[[package]] +name = "nltk" +version = "3.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "joblib" }, + { name = "regex" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442 }, +] + +[[package]] +name = "numpy" +version = "2.2.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/b2/ce4b867d8cd9c0ee84938ae1e6a6f7926ebf928c9090d036fc3c6a04f946/numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291", size = 20273920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/f7/1fd4ff108cd9d7ef929b8882692e23665dc9c23feecafbb9c6b80f4ec583/numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051", size = 20948633 }, + { url = "https://files.pythonhosted.org/packages/12/03/d443c278348371b20d830af155ff2079acad6a9e60279fac2b41dbbb73d8/numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc", size = 14176123 }, + { url = "https://files.pythonhosted.org/packages/2b/0b/5ca264641d0e7b14393313304da48b225d15d471250376f3fbdb1a2be603/numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e", size = 5163817 }, + { url = "https://files.pythonhosted.org/packages/04/b3/d522672b9e3d28e26e1613de7675b441bbd1eaca75db95680635dd158c67/numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa", size = 6698066 }, + { url = "https://files.pythonhosted.org/packages/a0/93/0f7a75c1ff02d4b76df35079676b3b2719fcdfb39abdf44c8b33f43ef37d/numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571", size = 14087277 }, + { url = 
"https://files.pythonhosted.org/packages/b0/d9/7c338b923c53d431bc837b5b787052fef9ae68a56fe91e325aac0d48226e/numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073", size = 16135742 }, + { url = "https://files.pythonhosted.org/packages/2d/10/4dec9184a5d74ba9867c6f7d1e9f2e0fb5fe96ff2bf50bb6f342d64f2003/numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8", size = 15581825 }, + { url = "https://files.pythonhosted.org/packages/80/1f/2b6fcd636e848053f5b57712a7d1880b1565eec35a637fdfd0a30d5e738d/numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae", size = 17899600 }, + { url = "https://files.pythonhosted.org/packages/ec/87/36801f4dc2623d76a0a3835975524a84bd2b18fe0f8835d45c8eae2f9ff2/numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb", size = 6312626 }, + { url = "https://files.pythonhosted.org/packages/8b/09/4ffb4d6cfe7ca6707336187951992bd8a8b9142cf345d87ab858d2d7636a/numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282", size = 12645715 }, + { url = "https://files.pythonhosted.org/packages/e2/a0/0aa7f0f4509a2e07bd7a509042967c2fab635690d4f48c6c7b3afd4f448c/numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4", size = 20935102 }, + { url = "https://files.pythonhosted.org/packages/7e/e4/a6a9f4537542912ec513185396fce52cdd45bdcf3e9d921ab02a93ca5aa9/numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f", size = 14191709 }, + { url = 
"https://files.pythonhosted.org/packages/be/65/72f3186b6050bbfe9c43cb81f9df59ae63603491d36179cf7a7c8d216758/numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9", size = 5149173 }, + { url = "https://files.pythonhosted.org/packages/e5/e9/83e7a9432378dde5802651307ae5e9ea07bb72b416728202218cd4da2801/numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191", size = 6684502 }, + { url = "https://files.pythonhosted.org/packages/ea/27/b80da6c762394c8ee516b74c1f686fcd16c8f23b14de57ba0cad7349d1d2/numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372", size = 14084417 }, + { url = "https://files.pythonhosted.org/packages/aa/fc/ebfd32c3e124e6a1043e19c0ab0769818aa69050ce5589b63d05ff185526/numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d", size = 16133807 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/4cc171a0acbe4666f7775cfd21d4eb6bb1d36d3a0431f48a73e9212d2278/numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7", size = 15575611 }, + { url = "https://files.pythonhosted.org/packages/a3/45/40f4135341850df48f8edcf949cf47b523c404b712774f8855a64c96ef29/numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73", size = 17895747 }, + { url = "https://files.pythonhosted.org/packages/f8/4c/b32a17a46f0ffbde8cc82df6d3daeaf4f552e346df143e1b188a701a8f09/numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b", size = 6309594 }, + { url = 
"https://files.pythonhosted.org/packages/13/ae/72e6276feb9ef06787365b05915bfdb057d01fceb4a43cb80978e518d79b/numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471", size = 12638356 }, + { url = "https://files.pythonhosted.org/packages/79/56/be8b85a9f2adb688e7ded6324e20149a03541d2b3297c3ffc1a73f46dedb/numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6", size = 20963778 }, + { url = "https://files.pythonhosted.org/packages/ff/77/19c5e62d55bff507a18c3cdff82e94fe174957bad25860a991cac719d3ab/numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba", size = 14207279 }, + { url = "https://files.pythonhosted.org/packages/75/22/aa11f22dc11ff4ffe4e849d9b63bbe8d4ac6d5fae85ddaa67dfe43be3e76/numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133", size = 5199247 }, + { url = "https://files.pythonhosted.org/packages/4f/6c/12d5e760fc62c08eded0394f62039f5a9857f758312bf01632a81d841459/numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376", size = 6711087 }, + { url = "https://files.pythonhosted.org/packages/ef/94/ece8280cf4218b2bee5cec9567629e61e51b4be501e5c6840ceb593db945/numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19", size = 14059964 }, + { url = "https://files.pythonhosted.org/packages/39/41/c5377dac0514aaeec69115830a39d905b1882819c8e65d97fc60e177e19e/numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0", size = 16121214 }, + { url = 
"https://files.pythonhosted.org/packages/db/54/3b9f89a943257bc8e187145c6bc0eb8e3d615655f7b14e9b490b053e8149/numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a", size = 15575788 }, + { url = "https://files.pythonhosted.org/packages/b1/c4/2e407e85df35b29f79945751b8f8e671057a13a376497d7fb2151ba0d290/numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066", size = 17893672 }, + { url = "https://files.pythonhosted.org/packages/29/7e/d0b44e129d038dba453f00d0e29ebd6eaf2f06055d72b95b9947998aca14/numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e", size = 6377102 }, + { url = "https://files.pythonhosted.org/packages/63/be/b85e4aa4bf42c6502851b971f1c326d583fcc68227385f92089cf50a7b45/numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8", size = 12750096 }, +] + +[[package]] +name = "nvidia-cublas-cu12" +version = "12.6.4.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/eb/ff4b8c503fa1f1796679dce648854d58751982426e4e4b37d6fce49d259c/nvidia_cublas_cu12-12.6.4.1-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:08ed2686e9875d01b58e3cb379c6896df8e76c75e0d4a7f7dace3d7b6d9ef8eb", size = 393138322 }, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.6.80" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/60/7b6497946d74bcf1de852a21824d63baad12cd417db4195fc1bfe59db953/nvidia_cuda_cupti_cu12-12.6.80-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6768bad6cab4f19e8292125e5f1ac8aa7d1718704012a0e3272a6f61c4bce132", size = 8917980 }, + { url = 
"https://files.pythonhosted.org/packages/a5/24/120ee57b218d9952c379d1e026c4479c9ece9997a4fb46303611ee48f038/nvidia_cuda_cupti_cu12-12.6.80-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a3eff6cdfcc6a4c35db968a06fcadb061cbc7d6dde548609a941ff8701b98b73", size = 8917972 }, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.6.77" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/2e/46030320b5a80661e88039f59060d1790298b4718944a65a7f2aeda3d9e9/nvidia_cuda_nvrtc_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:35b0cc6ee3a9636d5409133e79273ce1f3fd087abb0532d2d2e8fff1fe9efc53", size = 23650380 }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.6.77" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/23/e717c5ac26d26cf39a27fbc076240fad2e3b817e5889d671b67f4f9f49c5/nvidia_cuda_runtime_cu12-12.6.77-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ba3b56a4f896141e25e19ab287cd71e52a6a0f4b29d0d31609f60e3b4d5219b7", size = 897690 }, + { url = "https://files.pythonhosted.org/packages/f0/62/65c05e161eeddbafeca24dc461f47de550d9fa8a7e04eb213e32b55cfd99/nvidia_cuda_runtime_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a84d15d5e1da416dd4774cb42edf5e954a3e60cc945698dc1d5be02321c44dc8", size = 897678 }, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.5.1.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/78/4535c9c7f859a64781e43c969a3a7e84c54634e319a996d43ef32ce46f83/nvidia_cudnn_cu12-9.5.1.17-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:30ac3869f6db17d170e0e556dd6cc5eee02647abc31ca856634d5a40f82c15b2", size = 570988386 }, +] + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.3.0.4" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/16/73727675941ab8e6ffd86ca3a4b7b47065edcca7a997920b831f8147c99d/nvidia_cufft_cu12-11.3.0.4-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ccba62eb9cef5559abd5e0d54ceed2d9934030f51163df018532142a8ec533e5", size = 200221632 }, + { url = "https://files.pythonhosted.org/packages/60/de/99ec247a07ea40c969d904fc14f3a356b3e2a704121675b75c366b694ee1/nvidia_cufft_cu12-11.3.0.4-py3-none-manylinux2014_x86_64.whl", hash = "sha256:768160ac89f6f7b459bee747e8d175dbf53619cfe74b2a5636264163138013ca", size = 200221622 }, +] + +[[package]] +name = "nvidia-cufile-cu12" +version = "1.11.1.6" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/66/cc9876340ac68ae71b15c743ddb13f8b30d5244af344ec8322b449e35426/nvidia_cufile_cu12-1.11.1.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc23469d1c7e52ce6c1d55253273d32c565dd22068647f3aa59b3c6b005bf159", size = 1142103 }, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.7.77" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/1b/44a01c4e70933637c93e6e1a8063d1e998b50213a6b65ac5a9169c47e98e/nvidia_curand_cu12-10.3.7.77-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a42cd1344297f70b9e39a1e4f467a4e1c10f1da54ff7a85c12197f6c652c8bdf", size = 56279010 }, + { url = "https://files.pythonhosted.org/packages/4a/aa/2c7ff0b5ee02eaef890c0ce7d4f74bc30901871c5e45dee1ae6d0083cd80/nvidia_curand_cu12-10.3.7.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:99f1a32f1ac2bd134897fc7a203f779303261268a65762a623bf30cc9fe79117", size = 56279000 }, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.7.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"nvidia-cublas-cu12" }, + { name = "nvidia-cusparse-cu12" }, + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/6e/c2cf12c9ff8b872e92b4a5740701e51ff17689c4d726fca91875b07f655d/nvidia_cusolver_cu12-11.7.1.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9e49843a7707e42022babb9bcfa33c29857a93b88020c4e4434656a655b698c", size = 158229790 }, + { url = "https://files.pythonhosted.org/packages/9f/81/baba53585da791d043c10084cf9553e074548408e04ae884cfe9193bd484/nvidia_cusolver_cu12-11.7.1.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6cf28f17f64107a0c4d7802be5ff5537b2130bfc112f25d5a30df227058ca0e6", size = 158229780 }, +] + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.5.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/1e/b8b7c2f4099a37b96af5c9bb158632ea9e5d9d27d7391d7eb8fc45236674/nvidia_cusparse_cu12-12.5.4.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7556d9eca156e18184b94947ade0fba5bb47d69cec46bf8660fd2c71a4b48b73", size = 216561367 }, + { url = "https://files.pythonhosted.org/packages/43/ac/64c4316ba163e8217a99680c7605f779accffc6a4bcd0c778c12948d3707/nvidia_cusparse_cu12-12.5.4.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:23749a6571191a215cb74d1cdbff4a86e7b19f1200c071b3fcf844a5bea23a2f", size = 216561357 }, +] + +[[package]] +name = "nvidia-cusparselt-cu12" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/9a/72ef35b399b0e183bc2e8f6f558036922d453c4d8237dab26c666a04244b/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e5c8a26c36445dd2e6812f1177978a24e2d37cacce7e090f297a688d1ec44f46", size = 156785796 }, +] + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.26.2" +source = { registry 
= "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/ca/f42388aed0fddd64ade7493dbba36e1f534d4e6fdbdd355c6a90030ae028/nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:694cf3879a206553cc9d7dbda76b13efaf610fdb70a50cba303de1b0d1530ac6", size = 201319755 }, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.6.85" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/d7/c5383e47c7e9bf1c99d5bd2a8c935af2b6d705ad831a7ec5c97db4d82f4f/nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:eedc36df9e88b682efe4309aa16b5b4e78c2407eac59e8c10a6a47535164369a", size = 19744971 }, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.6.77" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/9a/fff8376f8e3d084cd1530e1ef7b879bb7d6d265620c95c1b322725c694f4/nvidia_nvtx_cu12-12.6.77-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b90bed3df379fa79afbd21be8e04a0314336b8ae16768b58f2d34cb1d04cd7d2", size = 89276 }, + { url = "https://files.pythonhosted.org/packages/9e/4e/0d0c945463719429b7bd21dece907ad0bde437a2ff12b9b12fee94722ab0/nvidia_nvtx_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6574241a3ec5fdc9334353ab8c479fe75841dbe8f4532a8fc97ce63503330ba1", size = 89265 }, +] + +[[package]] +name = "openai" +version = "1.71.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/19/b8f0347090a649dce55a008ec54ac6abb50553a06508cdb5e7abb2813e99/openai-1.71.0.tar.gz", hash = 
"sha256:52b20bb990a1780f9b0b8ccebac93416343ebd3e4e714e3eff730336833ca207", size = 409926 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/f7/049e85faf6a000890e5ca0edca8e9183f8a43c9e7bba869cad871da0caba/openai-1.71.0-py3-none-any.whl", hash = "sha256:e1c643738f1fff1af52bce6ef06a7716c95d089281e7011777179614f32937aa", size = 598975 }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, +] + +[[package]] +name = "pillow" +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350 }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980 }, + { url = 
"https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799 }, + { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973 }, + { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054 }, + { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484 }, + { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375 }, + { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773 }, + { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690 }, + { url = 
"https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951 }, + { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427 }, + { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685 }, + { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883 }, + { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837 }, + { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562 }, + { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761 }, + { url = 
"https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767 }, + { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989 }, + { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255 }, + { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603 }, + { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972 }, + { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375 }, +] + +[[package]] +name = "playwright" +version = "1.52.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/62/a20240605485ca99365a8b72ed95e0b4c5739a13fb986353f72d8d3f1d27/playwright-1.52.0-py3-none-macosx_10_13_x86_64.whl", hash = 
"sha256:19b2cb9d4794062008a635a99bd135b03ebb782d460f96534a91cb583f549512", size = 39611246 }, + { url = "https://files.pythonhosted.org/packages/dc/23/57ff081663b3061a2a3f0e111713046f705da2595f2f384488a76e4db732/playwright-1.52.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0797c0479cbdc99607412a3c486a3a2ec9ddc77ac461259fd2878c975bcbb94a", size = 37962977 }, + { url = "https://files.pythonhosted.org/packages/a2/ff/eee8532cff4b3d768768152e8c4f30d3caa80f2969bf3143f4371d377b74/playwright-1.52.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:7223960b7dd7ddeec1ba378c302d1d09733b8dac438f492e9854c85d3ca7144f", size = 39611247 }, + { url = "https://files.pythonhosted.org/packages/73/c6/8e27af9798f81465b299741ef57064c6ec1a31128ed297406469907dc5a4/playwright-1.52.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:d010124d24a321e0489a8c0d38a3971a7ca7656becea7656c9376bfea7f916d4", size = 45141333 }, + { url = "https://files.pythonhosted.org/packages/4e/e9/0661d343ed55860bcfb8934ce10e9597fc953358773ece507b22b0f35c57/playwright-1.52.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4173e453c43180acc60fd77ffe1ebee8d0efbfd9986c03267007b9c3845415af", size = 44540623 }, + { url = "https://files.pythonhosted.org/packages/7a/81/a850dbc6bc2e1bd6cc87341e59c253269602352de83d34b00ea38cf410ee/playwright-1.52.0-py3-none-win32.whl", hash = "sha256:cd0bdf92df99db6237a99f828e80a6a50db6180ef8d5352fc9495df2c92f9971", size = 34839156 }, + { url = "https://files.pythonhosted.org/packages/51/f3/cca2aa84eb28ea7d5b85d16caa92d62d18b6e83636e3d67957daca1ee4c7/playwright-1.52.0-py3-none-win_amd64.whl", hash = "sha256:dcbf75101eba3066b7521c6519de58721ea44379eb17a0dafa94f9f1b17f59e4", size = 34839164 }, + { url = "https://files.pythonhosted.org/packages/b5/4f/71a8a873e8c3c3e2d3ec03a578e546f6875be8a76214d90219f752f827cd/playwright-1.52.0-py3-none-win_arm64.whl", hash = "sha256:9d0085b8de513de5fb50669f8e6677f0252ef95a9a1d2d23ccee9638e71e65cb", size = 30688972 }, +] + 
+[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "postgrest" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecation" }, + { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/fb/be6216146156a22069fe87cea086e0308ca3595c10d7df90b70ef6ec339f/postgrest-1.0.1.tar.gz", hash = "sha256:0d6556dadfd8392147d98aad097fe7bf0196602e28a58eee5e9bde4390bb573f", size = 15147 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/0b/526f09779066e5c7716ede56a0394b1282a66b8381974879a77ae590c639/postgrest-1.0.1-py3-none-any.whl", hash = "sha256:fcc0518d68d924198c41c8cbaa70c342c641cb49311be33ba4fc74b4e742f22e", size = 22307 }, +] + +[[package]] +name = "propcache" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/c8/fdc6686a986feae3541ea23dcaa661bd93972d3940460646c6bb96e21c40/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf", size = 43651 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/aa/ca78d9be314d1e15ff517b992bebbed3bdfef5b8919e85bf4940e57b6137/propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723", size = 80430 }, + { url = 
"https://files.pythonhosted.org/packages/1a/d8/f0c17c44d1cda0ad1979af2e593ea290defdde9eaeb89b08abbe02a5e8e1/propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976", size = 46637 }, + { url = "https://files.pythonhosted.org/packages/ae/bd/c1e37265910752e6e5e8a4c1605d0129e5b7933c3dc3cf1b9b48ed83b364/propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b", size = 46123 }, + { url = "https://files.pythonhosted.org/packages/d4/b0/911eda0865f90c0c7e9f0415d40a5bf681204da5fd7ca089361a64c16b28/propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f", size = 243031 }, + { url = "https://files.pythonhosted.org/packages/0a/06/0da53397c76a74271621807265b6eb61fb011451b1ddebf43213df763669/propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70", size = 249100 }, + { url = "https://files.pythonhosted.org/packages/f1/eb/13090e05bf6b963fc1653cdc922133ced467cb4b8dab53158db5a37aa21e/propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7", size = 250170 }, + { url = "https://files.pythonhosted.org/packages/3b/4c/f72c9e1022b3b043ec7dc475a0f405d4c3e10b9b1d378a7330fecf0652da/propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25", size = 245000 }, + { url = "https://files.pythonhosted.org/packages/e8/fd/970ca0e22acc829f1adf5de3724085e778c1ad8a75bec010049502cb3a86/propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277", size = 230262 }, + { url = "https://files.pythonhosted.org/packages/c4/42/817289120c6b9194a44f6c3e6b2c3277c5b70bbad39e7df648f177cc3634/propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8", size = 236772 }, + { url = "https://files.pythonhosted.org/packages/7c/9c/3b3942b302badd589ad6b672da3ca7b660a6c2f505cafd058133ddc73918/propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e", size = 231133 }, + { url = "https://files.pythonhosted.org/packages/98/a1/75f6355f9ad039108ff000dfc2e19962c8dea0430da9a1428e7975cf24b2/propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee", size = 230741 }, + { url = "https://files.pythonhosted.org/packages/67/0c/3e82563af77d1f8731132166da69fdfd95e71210e31f18edce08a1eb11ea/propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815", size = 244047 }, + { url = "https://files.pythonhosted.org/packages/f7/50/9fb7cca01532a08c4d5186d7bb2da6c4c587825c0ae134b89b47c7d62628/propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5", size = 246467 }, + { url = "https://files.pythonhosted.org/packages/a9/02/ccbcf3e1c604c16cc525309161d57412c23cf2351523aedbb280eb7c9094/propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7", size = 241022 }, + { url = "https://files.pythonhosted.org/packages/db/19/e777227545e09ca1e77a6e21274ae9ec45de0f589f0ce3eca2a41f366220/propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b", size = 40647 }, + { url = 
"https://files.pythonhosted.org/packages/24/bb/3b1b01da5dd04c77a204c84e538ff11f624e31431cfde7201d9110b092b1/propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3", size = 44784 }, + { url = "https://files.pythonhosted.org/packages/58/60/f645cc8b570f99be3cf46714170c2de4b4c9d6b827b912811eff1eb8a412/propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8", size = 77865 }, + { url = "https://files.pythonhosted.org/packages/6f/d4/c1adbf3901537582e65cf90fd9c26fde1298fde5a2c593f987112c0d0798/propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f", size = 45452 }, + { url = "https://files.pythonhosted.org/packages/d1/b5/fe752b2e63f49f727c6c1c224175d21b7d1727ce1d4873ef1c24c9216830/propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111", size = 44800 }, + { url = "https://files.pythonhosted.org/packages/62/37/fc357e345bc1971e21f76597028b059c3d795c5ca7690d7a8d9a03c9708a/propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5", size = 225804 }, + { url = "https://files.pythonhosted.org/packages/0d/f1/16e12c33e3dbe7f8b737809bad05719cff1dccb8df4dafbcff5575002c0e/propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb", size = 230650 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/018b9f2ed876bf5091e60153f727e8f9073d97573f790ff7cdf6bc1d1fb8/propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7", size = 234235 }, + { url = 
"https://files.pythonhosted.org/packages/45/5f/3faee66fc930dfb5da509e34c6ac7128870631c0e3582987fad161fcb4b1/propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120", size = 228249 }, + { url = "https://files.pythonhosted.org/packages/62/1e/a0d5ebda5da7ff34d2f5259a3e171a94be83c41eb1e7cd21a2105a84a02e/propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654", size = 214964 }, + { url = "https://files.pythonhosted.org/packages/db/a0/d72da3f61ceab126e9be1f3bc7844b4e98c6e61c985097474668e7e52152/propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e", size = 222501 }, + { url = "https://files.pythonhosted.org/packages/18/6d/a008e07ad7b905011253adbbd97e5b5375c33f0b961355ca0a30377504ac/propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b", size = 217917 }, + { url = "https://files.pythonhosted.org/packages/98/37/02c9343ffe59e590e0e56dc5c97d0da2b8b19fa747ebacf158310f97a79a/propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53", size = 217089 }, + { url = "https://files.pythonhosted.org/packages/53/1b/d3406629a2c8a5666d4674c50f757a77be119b113eedd47b0375afdf1b42/propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5", size = 228102 }, + { url = "https://files.pythonhosted.org/packages/cd/a7/3664756cf50ce739e5f3abd48febc0be1a713b1f389a502ca819791a6b69/propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7", size = 230122 }, + { url = 
"https://files.pythonhosted.org/packages/35/36/0bbabaacdcc26dac4f8139625e930f4311864251276033a52fd52ff2a274/propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef", size = 226818 }, + { url = "https://files.pythonhosted.org/packages/cc/27/4e0ef21084b53bd35d4dae1634b6d0bad35e9c58ed4f032511acca9d4d26/propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24", size = 40112 }, + { url = "https://files.pythonhosted.org/packages/a6/2c/a54614d61895ba6dd7ac8f107e2b2a0347259ab29cbf2ecc7b94fa38c4dc/propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037", size = 44034 }, + { url = "https://files.pythonhosted.org/packages/5a/a8/0a4fd2f664fc6acc66438370905124ce62e84e2e860f2557015ee4a61c7e/propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f", size = 82613 }, + { url = "https://files.pythonhosted.org/packages/4d/e5/5ef30eb2cd81576256d7b6caaa0ce33cd1d2c2c92c8903cccb1af1a4ff2f/propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c", size = 47763 }, + { url = "https://files.pythonhosted.org/packages/87/9a/87091ceb048efeba4d28e903c0b15bcc84b7c0bf27dc0261e62335d9b7b8/propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc", size = 47175 }, + { url = "https://files.pythonhosted.org/packages/3e/2f/854e653c96ad1161f96194c6678a41bbb38c7947d17768e8811a77635a08/propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de", size = 292265 }, + { url = 
"https://files.pythonhosted.org/packages/40/8d/090955e13ed06bc3496ba4a9fb26c62e209ac41973cb0d6222de20c6868f/propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6", size = 294412 }, + { url = "https://files.pythonhosted.org/packages/39/e6/d51601342e53cc7582449e6a3c14a0479fab2f0750c1f4d22302e34219c6/propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7", size = 294290 }, + { url = "https://files.pythonhosted.org/packages/3b/4d/be5f1a90abc1881884aa5878989a1acdafd379a91d9c7e5e12cef37ec0d7/propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458", size = 282926 }, + { url = "https://files.pythonhosted.org/packages/57/2b/8f61b998c7ea93a2b7eca79e53f3e903db1787fca9373af9e2cf8dc22f9d/propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11", size = 267808 }, + { url = "https://files.pythonhosted.org/packages/11/1c/311326c3dfce59c58a6098388ba984b0e5fb0381ef2279ec458ef99bd547/propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c", size = 290916 }, + { url = "https://files.pythonhosted.org/packages/4b/74/91939924b0385e54dc48eb2e4edd1e4903ffd053cf1916ebc5347ac227f7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf", size = 262661 }, + { url = "https://files.pythonhosted.org/packages/c2/d7/e6079af45136ad325c5337f5dd9ef97ab5dc349e0ff362fe5c5db95e2454/propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27", size = 264384 }, + { url = "https://files.pythonhosted.org/packages/b7/d5/ba91702207ac61ae6f1c2da81c5d0d6bf6ce89e08a2b4d44e411c0bbe867/propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757", size = 291420 }, + { url = "https://files.pythonhosted.org/packages/58/70/2117780ed7edcd7ba6b8134cb7802aada90b894a9810ec56b7bb6018bee7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18", size = 290880 }, + { url = "https://files.pythonhosted.org/packages/4a/1f/ecd9ce27710021ae623631c0146719280a929d895a095f6d85efb6a0be2e/propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a", size = 287407 }, + { url = "https://files.pythonhosted.org/packages/3e/66/2e90547d6b60180fb29e23dc87bd8c116517d4255240ec6d3f7dc23d1926/propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d", size = 42573 }, + { url = "https://files.pythonhosted.org/packages/cb/8f/50ad8599399d1861b4d2b6b45271f0ef6af1b09b0a2386a46dbaf19c9535/propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e", size = 46757 }, + { url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", size = 12376 }, +] + +[[package]] +name = "psutil" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = 
"sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "2.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900 }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", 
size = 1888894 }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", 
size = 1893921 }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356 }, +] + +[[package]] +name = "pyee" +version = "13.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = 
"sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730 }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, +] + +[[package]] +name = "pyopenssl" +version = "25.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/26/e25b4a374b4639e0c235527bbe31c0524f26eda701d79456a7e1877f4cc5/pyopenssl-25.0.0.tar.gz", hash = 
"sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16", size = 179573 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/d7/eb76863d2060dcbe7c7e6cccfd95ac02ea0b9acc37745a0d99ff6457aefb/pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90", size = 56453 }, +] + +[[package]] +name = "pyperclip" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961 } + +[[package]] +name = "pytest" +version = "8.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url 
= "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "rank-bm25" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/0a/f9579384aa017d8b4c15613f86954b92a95a93d641cc849182467cf0bb3b/rank_bm25-0.2.2.tar.gz", hash = "sha256:096ccef76f8188563419aaf384a02f0ea459503fdf77901378d4fd9d87e5e51d", size = 8347 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/21/f691fb2613100a62b3fa91e9988c991e9ca5b89ea31c0d3152a3210344f9/rank_bm25-0.2.2-py3-none-any.whl", hash = "sha256:7bd4a95571adadfc271746fa146a4bcfd89c0cf731e49c3d1ad863290adbe8ae", size = 8584 }, +] + +[[package]] +name = "realtime" +version = "2.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "python-dateutil" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/75/fc/ef69bd4a1bf30a5435bc2d09f6c33bfef5f317746b1a4ca2932ef14b22fc/realtime-2.4.3.tar.gz", hash = "sha256:152febabc822ce60e11f202842c5aa6858ae4bd04920bfd6a00c1dd492f426b0", size = 18849 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/0c/68ce3db6354c466f68bba2be0fe0ad3a93dca8219e10b9bad3138077efec/realtime-2.4.3-py3-none-any.whl", hash = "sha256:09ff3b61ac928413a27765640b67362380eaddba84a7037a17972a64b1ac52f7", size = 22086 }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, +] + +[[package]] +name = "regex" +version = "2024.11.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781 }, + { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455 }, + { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759 }, + { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976 }, + { url = 
"https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077 }, + { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160 }, + { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896 }, + { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997 }, + { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725 }, + { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481 }, + { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", 
size = 852896 }, + { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138 }, + { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692 }, + { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135 }, + { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567 }, + { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525 }, + { url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324 }, + { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617 }, + { url = 
"https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023 }, + { url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072 }, + { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130 }, + { url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857 }, + { url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006 }, + { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650 }, + { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545 }, + { url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045 }, + { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182 }, + { url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733 }, + { url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122 }, + { url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "rich" +version = "14.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, +] + +[[package]] +name = "rpds-py" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/b3/52b213298a0ba7097c7ea96bee95e1947aa84cc816d48cebb539770cdf41/rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e", size = 26863 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/e0/1c55f4a3be5f1ca1a4fd1f3ff1504a1478c1ed48d84de24574c4fa87e921/rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205", size = 366945 }, + { url = "https://files.pythonhosted.org/packages/39/1b/a3501574fbf29118164314dbc800d568b8c1c7b3258b505360e8abb3902c/rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7", size = 351935 }, + { url = 
"https://files.pythonhosted.org/packages/dc/47/77d3d71c55f6a374edde29f1aca0b2e547325ed00a9da820cabbc9497d2b/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9", size = 390817 }, + { url = "https://files.pythonhosted.org/packages/4e/ec/1e336ee27484379e19c7f9cc170f4217c608aee406d3ae3a2e45336bff36/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e", size = 401983 }, + { url = "https://files.pythonhosted.org/packages/07/f8/39b65cbc272c635eaea6d393c2ad1ccc81c39eca2db6723a0ca4b2108fce/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda", size = 451719 }, + { url = "https://files.pythonhosted.org/packages/32/05/05c2b27dd9c30432f31738afed0300659cb9415db0ff7429b05dfb09bbde/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e", size = 442546 }, + { url = "https://files.pythonhosted.org/packages/7d/e0/19383c8b5d509bd741532a47821c3e96acf4543d0832beba41b4434bcc49/rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029", size = 393695 }, + { url = "https://files.pythonhosted.org/packages/9d/15/39f14e96d94981d0275715ae8ea564772237f3fa89bc3c21e24de934f2c7/rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9", size = 427218 }, + { url = "https://files.pythonhosted.org/packages/22/b9/12da7124905a680f690da7a9de6f11de770b5e359f5649972f7181c8bf51/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7", 
size = 568062 }, + { url = "https://files.pythonhosted.org/packages/88/17/75229017a2143d915f6f803721a6d721eca24f2659c5718a538afa276b4f/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91", size = 596262 }, + { url = "https://files.pythonhosted.org/packages/aa/64/8e8a1d8bd1b6b638d6acb6d41ab2cec7f2067a5b8b4c9175703875159a7c/rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56", size = 564306 }, + { url = "https://files.pythonhosted.org/packages/68/1c/a7eac8d8ed8cb234a9b1064647824c387753343c3fab6ed7c83481ed0be7/rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30", size = 224281 }, + { url = "https://files.pythonhosted.org/packages/bb/46/b8b5424d1d21f2f2f3f2d468660085318d4f74a8df8289e3dd6ad224d488/rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034", size = 239719 }, + { url = "https://files.pythonhosted.org/packages/9d/c3/3607abc770395bc6d5a00cb66385a5479fb8cd7416ddef90393b17ef4340/rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c", size = 367072 }, + { url = "https://files.pythonhosted.org/packages/d8/35/8c7ee0fe465793e3af3298dc5a9f3013bd63e7a69df04ccfded8293a4982/rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c", size = 351919 }, + { url = "https://files.pythonhosted.org/packages/91/d3/7e1b972501eb5466b9aca46a9c31bcbbdc3ea5a076e9ab33f4438c1d069d/rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240", size = 390360 }, + { url = 
"https://files.pythonhosted.org/packages/a2/a8/ccabb50d3c91c26ad01f9b09a6a3b03e4502ce51a33867c38446df9f896b/rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8", size = 400704 }, + { url = "https://files.pythonhosted.org/packages/53/ae/5fa5bf0f3bc6ce21b5ea88fc0ecd3a439e7cb09dd5f9ffb3dbe1b6894fc5/rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8", size = 450839 }, + { url = "https://files.pythonhosted.org/packages/e3/ac/c4e18b36d9938247e2b54f6a03746f3183ca20e1edd7d3654796867f5100/rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b", size = 441494 }, + { url = "https://files.pythonhosted.org/packages/bf/08/b543969c12a8f44db6c0f08ced009abf8f519191ca6985509e7c44102e3c/rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d", size = 393185 }, + { url = "https://files.pythonhosted.org/packages/da/7e/f6eb6a7042ce708f9dfc781832a86063cea8a125bbe451d663697b51944f/rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7", size = 426168 }, + { url = "https://files.pythonhosted.org/packages/38/b0/6cd2bb0509ac0b51af4bb138e145b7c4c902bb4b724d6fd143689d6e0383/rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad", size = 567622 }, + { url = "https://files.pythonhosted.org/packages/64/b0/c401f4f077547d98e8b4c2ec6526a80e7cb04f519d416430ec1421ee9e0b/rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120", size = 595435 }, + { url = 
"https://files.pythonhosted.org/packages/9f/ec/7993b6e803294c87b61c85bd63e11142ccfb2373cf88a61ec602abcbf9d6/rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9", size = 563762 }, + { url = "https://files.pythonhosted.org/packages/1f/29/4508003204cb2f461dc2b83dd85f8aa2b915bc98fe6046b9d50d4aa05401/rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143", size = 223510 }, + { url = "https://files.pythonhosted.org/packages/f9/12/09e048d1814195e01f354155fb772fb0854bd3450b5f5a82224b3a319f0e/rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a", size = 239075 }, + { url = "https://files.pythonhosted.org/packages/d2/03/5027cde39bb2408d61e4dd0cf81f815949bb629932a6c8df1701d0257fc4/rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114", size = 362974 }, + { url = "https://files.pythonhosted.org/packages/bf/10/24d374a2131b1ffafb783e436e770e42dfdb74b69a2cd25eba8c8b29d861/rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405", size = 348730 }, + { url = "https://files.pythonhosted.org/packages/7a/d1/1ef88d0516d46cd8df12e5916966dbf716d5ec79b265eda56ba1b173398c/rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47", size = 387627 }, + { url = "https://files.pythonhosted.org/packages/4e/35/07339051b8b901ecefd449ebf8e5522e92bcb95e1078818cbfd9db8e573c/rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272", size = 394094 }, + { url = 
"https://files.pythonhosted.org/packages/dc/62/ee89ece19e0ba322b08734e95441952062391065c157bbd4f8802316b4f1/rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd", size = 449639 }, + { url = "https://files.pythonhosted.org/packages/15/24/b30e9f9e71baa0b9dada3a4ab43d567c6b04a36d1cb531045f7a8a0a7439/rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a", size = 438584 }, + { url = "https://files.pythonhosted.org/packages/28/d9/49f7b8f3b4147db13961e19d5e30077cd0854ccc08487026d2cb2142aa4a/rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d", size = 391047 }, + { url = "https://files.pythonhosted.org/packages/49/b0/e66918d0972c33a259ba3cd7b7ff10ed8bd91dbcfcbec6367b21f026db75/rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7", size = 418085 }, + { url = "https://files.pythonhosted.org/packages/e1/6b/99ed7ea0a94c7ae5520a21be77a82306aac9e4e715d4435076ead07d05c6/rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d", size = 564498 }, + { url = "https://files.pythonhosted.org/packages/28/26/1cacfee6b800e6fb5f91acecc2e52f17dbf8b0796a7c984b4568b6d70e38/rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797", size = 590202 }, + { url = "https://files.pythonhosted.org/packages/a9/9e/57bd2f9fba04a37cef673f9a66b11ca8c43ccdd50d386c455cd4380fe461/rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c", size = 561771 }, + { url = 
"https://files.pythonhosted.org/packages/9f/cf/b719120f375ab970d1c297dbf8de1e3c9edd26fe92c0ed7178dd94b45992/rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba", size = 221195 }, + { url = "https://files.pythonhosted.org/packages/2d/e5/22865285789f3412ad0c3d7ec4dc0a3e86483b794be8a5d9ed5a19390900/rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350", size = 237354 }, +] + +[[package]] +name = "safetensors" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917 }, + { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419 }, + { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493 }, + { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 
472400 }, + { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891 }, + { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694 }, + { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642 }, + { url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241 }, + { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001 }, + { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013 }, + { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687 }, + { 
url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147 }, + { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677 }, + { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878 }, +] + +[[package]] +name = "scikit-learn" +version = "1.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/a5/4ae3b3a0755f7b35a280ac90b28817d1f380318973cff14075ab41ef50d9/scikit_learn-1.6.1.tar.gz", hash = "sha256:b4fc2525eca2c69a59260f583c56a7557c6ccdf8deafdba6e060f94c1c59738e", size = 7068312 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/18/c797c9b8c10380d05616db3bfb48e2a3358c767affd0857d56c2eb501caa/scikit_learn-1.6.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:926f207c804104677af4857b2c609940b743d04c4c35ce0ddc8ff4f053cddc1b", size = 12104516 }, + { url = "https://files.pythonhosted.org/packages/c4/b7/2e35f8e289ab70108f8cbb2e7a2208f0575dc704749721286519dcf35f6f/scikit_learn-1.6.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c2cae262064e6a9b77eee1c8e768fc46aa0b8338c6a8297b9b6759720ec0ff2", size = 11167837 }, + { url = 
"https://files.pythonhosted.org/packages/a4/f6/ff7beaeb644bcad72bcfd5a03ff36d32ee4e53a8b29a639f11bcb65d06cd/scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1061b7c028a8663fb9a1a1baf9317b64a257fcb036dae5c8752b2abef31d136f", size = 12253728 }, + { url = "https://files.pythonhosted.org/packages/29/7a/8bce8968883e9465de20be15542f4c7e221952441727c4dad24d534c6d99/scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e69fab4ebfc9c9b580a7a80111b43d214ab06250f8a7ef590a4edf72464dd86", size = 13147700 }, + { url = "https://files.pythonhosted.org/packages/62/27/585859e72e117fe861c2079bcba35591a84f801e21bc1ab85bce6ce60305/scikit_learn-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:70b1d7e85b1c96383f872a519b3375f92f14731e279a7b4c6cfd650cf5dffc52", size = 11110613 }, + { url = "https://files.pythonhosted.org/packages/2e/59/8eb1872ca87009bdcdb7f3cdc679ad557b992c12f4b61f9250659e592c63/scikit_learn-1.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ffa1e9e25b3d93990e74a4be2c2fc61ee5af85811562f1288d5d055880c4322", size = 12010001 }, + { url = "https://files.pythonhosted.org/packages/9d/05/f2fc4effc5b32e525408524c982c468c29d22f828834f0625c5ef3d601be/scikit_learn-1.6.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:dc5cf3d68c5a20ad6d571584c0750ec641cc46aeef1c1507be51300e6003a7e1", size = 11096360 }, + { url = "https://files.pythonhosted.org/packages/c8/e4/4195d52cf4f113573fb8ebc44ed5a81bd511a92c0228889125fac2f4c3d1/scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c06beb2e839ecc641366000ca84f3cf6fa9faa1777e29cf0c04be6e4d096a348", size = 12209004 }, + { url = "https://files.pythonhosted.org/packages/94/be/47e16cdd1e7fcf97d95b3cb08bde1abb13e627861af427a3651fcb80b517/scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8ca8cb270fee8f1f76fa9bfd5c3507d60c6438bbee5687f81042e2bb98e5a97", size = 
13171776 }, + { url = "https://files.pythonhosted.org/packages/34/b0/ca92b90859070a1487827dbc672f998da95ce83edce1270fc23f96f1f61a/scikit_learn-1.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:7a1c43c8ec9fde528d664d947dc4c0789be4077a3647f232869f41d9bf50e0fb", size = 11071865 }, + { url = "https://files.pythonhosted.org/packages/12/ae/993b0fb24a356e71e9a894e42b8a9eec528d4c70217353a1cd7a48bc25d4/scikit_learn-1.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a17c1dea1d56dcda2fac315712f3651a1fea86565b64b48fa1bc090249cbf236", size = 11955804 }, + { url = "https://files.pythonhosted.org/packages/d6/54/32fa2ee591af44507eac86406fa6bba968d1eb22831494470d0a2e4a1eb1/scikit_learn-1.6.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a7aa5f9908f0f28f4edaa6963c0a6183f1911e63a69aa03782f0d924c830a35", size = 11100530 }, + { url = "https://files.pythonhosted.org/packages/3f/58/55856da1adec655bdce77b502e94a267bf40a8c0b89f8622837f89503b5a/scikit_learn-1.6.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0650e730afb87402baa88afbf31c07b84c98272622aaba002559b614600ca691", size = 12433852 }, + { url = "https://files.pythonhosted.org/packages/ff/4f/c83853af13901a574f8f13b645467285a48940f185b690936bb700a50863/scikit_learn-1.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:3f59fe08dc03ea158605170eb52b22a105f238a5d512c4470ddeca71feae8e5f", size = 11337256 }, +] + +[[package]] +name = "scipy" +version = "1.15.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019", size = 38766735 }, + { url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6", size = 30173284 }, + { url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477", size = 22446958 }, + { url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c", size = 25242454 }, + { url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45", size = 35210199 }, + { url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49", size = 37309455 }, + { url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e", size = 36885140 }, + { url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539", size = 39710549 }, + { url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed", size = 40966184 }, + { url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759", size = 38728256 }, + { url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62", size = 30109540 }, + { url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb", size = 22383115 }, + { url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730", size = 25163884 }, + { url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018 }, + { url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", 
size = 37269716 }, + { url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342 }, + { url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869 }, + { url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163", size = 40988851 }, + { url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8", size = 38863011 }, + { url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5", size = 30266407 }, + { url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e", size = 22540030 }, + { url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb", size = 25218709 }, + { url = 
"https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045 }, + { url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062 }, + { url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132 }, + { url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503 }, + { url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca", size = 40308097 }, +] + +[[package]] +name = "sentence-transformers" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "pillow" }, + { name = "scikit-learn" }, + { name = "scipy" }, + { name = "torch" }, + { name = "tqdm" }, + { name = "transformers" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/84/b30d1b29ff58cfdff423e36a50efd622c8e31d7039b1a0d5e72066620da1/sentence_transformers-4.1.0.tar.gz", hash = "sha256:f125ffd1c727533e0eca5d4567de72f84728de8f7482834de442fd90c2c3d50b", size = 272420 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/45/2d/1151b371f28caae565ad384fdc38198f1165571870217aedda230b9d7497/sentence_transformers-4.1.0-py3-none-any.whl", hash = "sha256:382a7f6be1244a100ce40495fb7523dbe8d71b3c10b299f81e6b735092b3b8ca", size = 345695 }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, +] + +[[package]] +name = "soupsieve" +version = "2.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677 }, +] + +[[package]] +name = "sse-starlette" +version = "2.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "starlette" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/35/7d8d94eb0474352d55f60f80ebc30f7e59441a29e18886a6425f0bccd0d3/sse_starlette-2.3.3.tar.gz", hash = "sha256:fdd47c254aad42907cfd5c5b83e2282be15be6c51197bf1a9b70b8e990522072", size = 17499 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5d/20/52fdb5ebb158294b0adb5662235dd396fc7e47aa31c293978d8d8942095a/sse_starlette-2.3.3-py3-none-any.whl", hash = "sha256:8b0a0ced04a329ff7341b01007580dd8cf71331cc21c0ccea677d500618da1e0", size = 10235 }, +] + +[[package]] +name = "starlette" +version = "0.45.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/fb/2984a686808b89a6781526129a4b51266f678b2d2b97ab2d325e56116df8/starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f", size = 2574076 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/61/f2b52e107b1fc8944b33ef56bf6ac4ebbe16d91b94d2b87ce013bf63fb84/starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d", size = 71507 }, +] + +[[package]] +name = "storage3" +version = "0.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/25/83eb4e4612dc07a3bb3cab96253c9c83752d4816f2cf38aa832dfb8d8813/storage3-0.11.3.tar.gz", hash = "sha256:883637132aad36d9d92b7c497a8a56dff7c51f15faf2ff7acbccefbbd5e97347", size = 9930 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/8d/ff89f85c4b48285ac7cddf0fafe5e55bb3742d374672b2fbd2627c213fa6/storage3-0.11.3-py3-none-any.whl", hash = "sha256:090c42152217d5d39bd94af3ddeb60c8982f3a283dcd90b53d058f2db33e6007", size = 17831 }, +] + +[[package]] +name = "strenum" +version = "0.4.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851 }, +] + +[[package]] +name = "supabase" +version = "2.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gotrue" }, + { name = "httpx" }, + { name = "postgrest" }, + { name = "realtime" }, + { name = "storage3" }, + { name = "supafunc" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/58/a211c4cb0fe1c139247c1e07d473da080e503969a93b7ffa5f20d6f9bb1e/supabase-2.15.1.tar.gz", hash = "sha256:66e847dab9346062aa6a25b4e81ac786b972c5d4299827c57d1d5bd6a0346070", size = 14548 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c4/ccf757e08a5b4a131e5fde89b3f6b64ab308ca765f2f3bc8f62d58007d7c/supabase-2.15.1-py3-none-any.whl", hash = "sha256:749299cdd74ecf528f52045c1e60d9dba81cc2054656f754c0ca7fba0dd34827", size = 17459 }, +] + +[[package]] +name = "supafunc" +version = "0.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "strenum" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/74/4f9e23690d2dfc0afb4a13d2d232415a6ef9b80397495afb548410035532/supafunc-0.9.4.tar.gz", hash = "sha256:68824a9a7bcccf5ab1e038cda632ba47cba27f2a7dc606014206b56f5a071de2", size = 4806 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/51/b0bb6d405c053ecf9c51267b5a429424cab9ae3de229a1dfda3197ab251f/supafunc-0.9.4-py3-none-any.whl", hash = "sha256:2b34a794fb7930953150a434cdb93c24a04cf526b2f51a9e60b2be0b86d44fb2", size = 7792 }, +] + +[[package]] +name = "sympy" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, +] + +[[package]] +name = "tf-playwright-stealth" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fake-http-header" }, + { name = "playwright" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/46/d73c62c4d84a06bac77e1f515560a08dee212b630afec9162c38f29c1d68/tf_playwright_stealth-1.1.2.tar.gz", hash = "sha256:d9f78890940c1d1de5b73c366f68930a206bd62d7a06aba4be32fc222ba058b4", size = 23361 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2b/10101d8db05e5b1a1fcb197bbde9ee87c6066108f546356771bc6d84b1cc/tf_playwright_stealth-1.1.2-py3-none-any.whl", hash = "sha256:050bb98d221909de40ee5e75ec7c3d351320eab3b6ad6d8df608090efc16a0c5", size = 33208 }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638 }, +] + +[[package]] +name = "tiktoken" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" 
}, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 }, + { url = 
"https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919 }, + { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877 }, + { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095 }, + { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649 }, + { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465 }, + { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669 }, +] + +[[package]] +name = "tokenizers" +version = "0.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/76/5ac0c97f1117b91b7eb7323dcd61af80d72f790b4df71249a7850c195f30/tokenizers-0.21.1.tar.gz", hash = 
"sha256:a1bb04dc5b448985f86ecd4b05407f5a8d97cb2c0532199b2a302a604a0165ab", size = 343256 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/1f/328aee25f9115bf04262e8b4e5a2050b7b7cf44b59c74e982db7270c7f30/tokenizers-0.21.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e78e413e9e668ad790a29456e677d9d3aa50a9ad311a40905d6861ba7692cf41", size = 2780767 }, + { url = "https://files.pythonhosted.org/packages/ae/1a/4526797f3719b0287853f12c5ad563a9be09d446c44ac784cdd7c50f76ab/tokenizers-0.21.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cd51cd0a91ecc801633829fcd1fda9cf8682ed3477c6243b9a095539de4aecf3", size = 2650555 }, + { url = "https://files.pythonhosted.org/packages/4d/7a/a209b29f971a9fdc1da86f917fe4524564924db50d13f0724feed37b2a4d/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28da6b72d4fb14ee200a1bd386ff74ade8992d7f725f2bde2c495a9a98cf4d9f", size = 2937541 }, + { url = "https://files.pythonhosted.org/packages/3c/1e/b788b50ffc6191e0b1fc2b0d49df8cff16fe415302e5ceb89f619d12c5bc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34d8cfde551c9916cb92014e040806122295a6800914bab5865deb85623931cf", size = 2819058 }, + { url = "https://files.pythonhosted.org/packages/36/aa/3626dfa09a0ecc5b57a8c58eeaeb7dd7ca9a37ad9dd681edab5acd55764c/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa852d23e125b73d283c98f007e06d4595732104b65402f46e8ef24b588d9f8", size = 3133278 }, + { url = "https://files.pythonhosted.org/packages/a4/4d/8fbc203838b3d26269f944a89459d94c858f5b3f9a9b6ee9728cdcf69161/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a21a15d5c8e603331b8a59548bbe113564136dc0f5ad8306dd5033459a226da0", size = 3144253 }, + { url = 
"https://files.pythonhosted.org/packages/d8/1b/2bd062adeb7c7511b847b32e356024980c0ffcf35f28947792c2d8ad2288/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fdbd4c067c60a0ac7eca14b6bd18a5bebace54eb757c706b47ea93204f7a37c", size = 3398225 }, + { url = "https://files.pythonhosted.org/packages/8a/63/38be071b0c8e06840bc6046991636bcb30c27f6bb1e670f4f4bc87cf49cc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd9a0061e403546f7377df940e866c3e678d7d4e9643d0461ea442b4f89e61a", size = 3038874 }, + { url = "https://files.pythonhosted.org/packages/ec/83/afa94193c09246417c23a3c75a8a0a96bf44ab5630a3015538d0c316dd4b/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:db9484aeb2e200c43b915a1a0150ea885e35f357a5a8fabf7373af333dcc8dbf", size = 9014448 }, + { url = "https://files.pythonhosted.org/packages/ae/b3/0e1a37d4f84c0f014d43701c11eb8072704f6efe8d8fc2dcdb79c47d76de/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed248ab5279e601a30a4d67bdb897ecbe955a50f1e7bb62bd99f07dd11c2f5b6", size = 8937877 }, + { url = "https://files.pythonhosted.org/packages/ac/33/ff08f50e6d615eb180a4a328c65907feb6ded0b8f990ec923969759dc379/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9ac78b12e541d4ce67b4dfd970e44c060a2147b9b2a21f509566d556a509c67d", size = 9186645 }, + { url = "https://files.pythonhosted.org/packages/5f/aa/8ae85f69a9f6012c6f8011c6f4aa1c96154c816e9eea2e1b758601157833/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5a69c1a4496b81a5ee5d2c1f3f7fbdf95e90a0196101b0ee89ed9956b8a168f", size = 9384380 }, + { url = "https://files.pythonhosted.org/packages/e8/5b/a5d98c89f747455e8b7a9504910c865d5e51da55e825a7ae641fb5ff0a58/tokenizers-0.21.1-cp39-abi3-win32.whl", hash = "sha256:1039a3a5734944e09de1d48761ade94e00d0fa760c0e0551151d4dd851ba63e3", size = 2239506 }, + { url = 
"https://files.pythonhosted.org/packages/e6/b6/072a8e053ae600dcc2ac0da81a23548e3b523301a442a6ca900e92ac35be/tokenizers-0.21.1-cp39-abi3-win_amd64.whl", hash = "sha256:0f0dcbcc9f6e13e675a66d7a5f2f225a736745ce484c1a4e07476a89ccdad382", size = 2435481 }, +] + +[[package]] +name = "torch" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "jinja2" }, + { name = "networkx" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "setuptools" }, + { name = "sympy" }, + { 
name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "typing-extensions" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/5e/ac759f4c0ab7c01feffa777bd68b43d2ac61560a9770eeac074b450f81d4/torch-2.7.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:36a6368c7ace41ad1c0f69f18056020b6a5ca47bedaca9a2f3b578f5a104c26c", size = 99013250 }, + { url = "https://files.pythonhosted.org/packages/9c/58/2d245b6f1ef61cf11dfc4aceeaacbb40fea706ccebac3f863890c720ab73/torch-2.7.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:15aab3e31c16feb12ae0a88dba3434a458874636f360c567caa6a91f6bfba481", size = 865042157 }, + { url = "https://files.pythonhosted.org/packages/44/80/b353c024e6b624cd9ce1d66dcb9d24e0294680f95b369f19280e241a0159/torch-2.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:f56d4b2510934e072bab3ab8987e00e60e1262fb238176168f5e0c43a1320c6d", size = 212482262 }, + { url = "https://files.pythonhosted.org/packages/ee/8d/b2939e5254be932db1a34b2bd099070c509e8887e0c5a90c498a917e4032/torch-2.7.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:30b7688a87239a7de83f269333651d8e582afffce6f591fff08c046f7787296e", size = 68574294 }, + { url = "https://files.pythonhosted.org/packages/14/24/720ea9a66c29151b315ea6ba6f404650834af57a26b2a04af23ec246b2d5/torch-2.7.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:868ccdc11798535b5727509480cd1d86d74220cfdc42842c4617338c1109a205", size = 99015553 }, + { url = "https://files.pythonhosted.org/packages/4b/27/285a8cf12bd7cd71f9f211a968516b07dcffed3ef0be585c6e823675ab91/torch-2.7.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9b52347118116cf3dff2ab5a3c3dd97c719eb924ac658ca2a7335652076df708", size = 865046389 }, + { url = "https://files.pythonhosted.org/packages/74/c8/2ab2b6eadc45554af8768ae99668c5a8a8552e2012c7238ded7e9e4395e1/torch-2.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:434cf3b378340efc87c758f250e884f34460624c0523fe5c9b518d205c91dd1b", size = 
212490304 }, + { url = "https://files.pythonhosted.org/packages/28/fd/74ba6fde80e2b9eef4237fe668ffae302c76f0e4221759949a632ca13afa/torch-2.7.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:edad98dddd82220465b106506bb91ee5ce32bd075cddbcf2b443dfaa2cbd83bf", size = 68856166 }, + { url = "https://files.pythonhosted.org/packages/cb/b4/8df3f9fe6bdf59e56a0e538592c308d18638eb5f5dc4b08d02abb173c9f0/torch-2.7.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:2a885fc25afefb6e6eb18a7d1e8bfa01cc153e92271d980a49243b250d5ab6d9", size = 99091348 }, + { url = "https://files.pythonhosted.org/packages/9d/f5/0bd30e9da04c3036614aa1b935a9f7e505a9e4f1f731b15e165faf8a4c74/torch-2.7.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:176300ff5bc11a5f5b0784e40bde9e10a35c4ae9609beed96b4aeb46a27f5fae", size = 865104023 }, + { url = "https://files.pythonhosted.org/packages/d1/b7/2235d0c3012c596df1c8d39a3f4afc1ee1b6e318d469eda4c8bb68566448/torch-2.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d0ca446a93f474985d81dc866fcc8dccefb9460a29a456f79d99c29a78a66993", size = 212750916 }, + { url = "https://files.pythonhosted.org/packages/90/48/7e6477cf40d48cc0a61fa0d41ee9582b9a316b12772fcac17bc1a40178e7/torch-2.7.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:27f5007bdf45f7bb7af7f11d1828d5c2487e030690afb3d89a651fd7036a390e", size = 68575074 }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = 
"sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, +] + +[[package]] +name = "transformers" +version = "4.52.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "requests" }, + { name = "safetensors" }, + { name = "tokenizers" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/a9/275037087f9d846580b02f2d7cae0e0a6955d46f84583d0151d6227bd416/transformers-4.52.4.tar.gz", hash = "sha256:aff3764441c1adc192a08dba49740d3cbbcb72d850586075aed6bd89b98203e6", size = 8945376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/f2/25b27b396af03d5b64e61976b14f7209e2939e9e806c10749b6d277c273e/transformers-4.52.4-py3-none-any.whl", hash = "sha256:203f5c19416d5877e36e88633943761719538a25d9775977a24fe77a1e5adfc7", size = 10460375 }, +] + +[[package]] +name = "triton" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/53/ce18470914ab6cfbec9384ee565d23c4d1c55f0548160b1c7b33000b11fd/triton-3.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b68c778f6c4218403a6bd01be7484f6dc9e20fe2083d22dd8aef33e3b87a10a3", size = 156504509 }, + { url = "https://files.pythonhosted.org/packages/7d/74/4bf2702b65e93accaa20397b74da46fb7a0356452c1bb94dbabaf0582930/triton-3.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47bc87ad66fa4ef17968299acacecaab71ce40a238890acc6ad197c3abe2b8f1", size = 156516468 }, + { url = "https://files.pythonhosted.org/packages/0a/93/f28a696fa750b9b608baa236f8225dd3290e5aff27433b06143adc025961/triton-3.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ce4700fc14032af1e049005ae94ba908e71cd6c2df682239aed08e49bc71b742", size = 156580729 }, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = 
"sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 }, +] + +[[package]] +name = "websockets" +version = "14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/54/8359678c726243d19fae38ca14a334e740782336c9f19700858c4eb64a1e/websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5", size = 164394 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/81/04f7a397653dc8bec94ddc071f34833e8b99b13ef1a3804c149d59f92c18/websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c", size = 163096 }, + { url = "https://files.pythonhosted.org/packages/ec/c5/de30e88557e4d70988ed4d2eabd73fd3e1e52456b9f3a4e9564d86353b6d/websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967", size = 160758 }, + { url = "https://files.pythonhosted.org/packages/e5/8c/d130d668781f2c77d106c007b6c6c1d9db68239107c41ba109f09e6c218a/websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990", size = 160995 }, + { url = 
"https://files.pythonhosted.org/packages/a6/bc/f6678a0ff17246df4f06765e22fc9d98d1b11a258cc50c5968b33d6742a1/websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda", size = 170815 }, + { url = "https://files.pythonhosted.org/packages/d8/b2/8070cb970c2e4122a6ef38bc5b203415fd46460e025652e1ee3f2f43a9a3/websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95", size = 169759 }, + { url = "https://files.pythonhosted.org/packages/81/da/72f7caabd94652e6eb7e92ed2d3da818626e70b4f2b15a854ef60bf501ec/websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3", size = 170178 }, + { url = "https://files.pythonhosted.org/packages/31/e0/812725b6deca8afd3a08a2e81b3c4c120c17f68c9b84522a520b816cda58/websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9", size = 170453 }, + { url = "https://files.pythonhosted.org/packages/66/d3/8275dbc231e5ba9bb0c4f93144394b4194402a7a0c8ffaca5307a58ab5e3/websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267", size = 169830 }, + { url = "https://files.pythonhosted.org/packages/a3/ae/e7d1a56755ae15ad5a94e80dd490ad09e345365199600b2629b18ee37bc7/websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe", size = 169824 }, + { url = "https://files.pythonhosted.org/packages/b6/32/88ccdd63cb261e77b882e706108d072e4f1c839ed723bf91a3e1f216bf60/websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205", 
size = 163981 }, + { url = "https://files.pythonhosted.org/packages/b3/7d/32cdb77990b3bdc34a306e0a0f73a1275221e9a66d869f6ff833c95b56ef/websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce", size = 164421 }, + { url = "https://files.pythonhosted.org/packages/82/94/4f9b55099a4603ac53c2912e1f043d6c49d23e94dd82a9ce1eb554a90215/websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e", size = 163102 }, + { url = "https://files.pythonhosted.org/packages/8e/b7/7484905215627909d9a79ae07070057afe477433fdacb59bf608ce86365a/websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad", size = 160766 }, + { url = "https://files.pythonhosted.org/packages/a3/a4/edb62efc84adb61883c7d2c6ad65181cb087c64252138e12d655989eec05/websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03", size = 160998 }, + { url = "https://files.pythonhosted.org/packages/f5/79/036d320dc894b96af14eac2529967a6fc8b74f03b83c487e7a0e9043d842/websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f", size = 170780 }, + { url = "https://files.pythonhosted.org/packages/63/75/5737d21ee4dd7e4b9d487ee044af24a935e36a9ff1e1419d684feedcba71/websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5", size = 169717 }, + { url = "https://files.pythonhosted.org/packages/2c/3c/bf9b2c396ed86a0b4a92ff4cdaee09753d3ee389be738e92b9bbd0330b64/websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a", size = 170155 }, + { url = "https://files.pythonhosted.org/packages/75/2d/83a5aca7247a655b1da5eb0ee73413abd5c3a57fc8b92915805e6033359d/websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20", size = 170495 }, + { url = "https://files.pythonhosted.org/packages/79/dd/699238a92761e2f943885e091486378813ac8f43e3c84990bc394c2be93e/websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2", size = 169880 }, + { url = "https://files.pythonhosted.org/packages/c8/c9/67a8f08923cf55ce61aadda72089e3ed4353a95a3a4bc8bf42082810e580/websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307", size = 169856 }, + { url = "https://files.pythonhosted.org/packages/17/b1/1ffdb2680c64e9c3921d99db460546194c40d4acbef999a18c37aa4d58a3/websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc", size = 163974 }, + { url = "https://files.pythonhosted.org/packages/14/13/8b7fc4cb551b9cfd9890f0fd66e53c18a06240319915533b033a56a3d520/websockets-14.2-cp313-cp313-win_amd64.whl", hash = "sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f", size = 164420 }, + { url = "https://files.pythonhosted.org/packages/7b/c8/d529f8a32ce40d98309f4470780631e971a5a842b60aec864833b3615786/websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b", size = 157416 }, +] + +[[package]] +name = "xxhash" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = 
"sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00", size = 31969 }, + { url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9", size = 30787 }, + { url = "https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84", size = 220959 }, + { url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793", size = 200006 }, + { url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be", size = 428326 }, + { url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6", size = 194380 }, + { url = 
"https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90", size = 207934 }, + { url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27", size = 216301 }, + { url = "https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2", size = 203351 }, + { url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d", size = 210294 }, + { url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab", size = 414674 }, + { url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e", size = 192022 }, + { url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8", size = 30170 }, + { url = 
"https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e", size = 30040 }, + { url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2", size = 26796 }, + { url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795 }, + { url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792 }, + { url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc", size = 220950 }, + { url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3", size = 199980 }, + { url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c", size = 428324 }, + { url = 
"https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb", size = 194370 }, + { url = "https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f", size = 207911 }, + { url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7", size = 216352 }, + { url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326", size = 203410 }, + { url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf", size = 210322 }, + { url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7", size = 414725 }, + { url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c", size = 192070 }, + { url = 
"https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637", size = 30172 }, + { url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43", size = 30041 }, + { url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b", size = 26801 }, +] + +[[package]] +name = "yarl" +version = "1.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/51/c0edba5219027f6eab262e139f73e2417b0f4efffa23bf562f6e18f76ca5/yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307", size = 185258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/e8/3efdcb83073df978bb5b1a9cc0360ce596680e6c3fac01f2a994ccbb8939/yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f", size = 147089 }, + { url = "https://files.pythonhosted.org/packages/60/c3/9e776e98ea350f76f94dd80b408eaa54e5092643dbf65fd9babcffb60509/yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e", size = 97706 }, + { url = "https://files.pythonhosted.org/packages/0c/5b/45cdfb64a3b855ce074ae607b9fc40bc82e7613b94e7612b030255c93a09/yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e", 
size = 95719 }, + { url = "https://files.pythonhosted.org/packages/2d/4e/929633b249611eeed04e2f861a14ed001acca3ef9ec2a984a757b1515889/yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33", size = 343972 }, + { url = "https://files.pythonhosted.org/packages/49/fd/047535d326c913f1a90407a3baf7ff535b10098611eaef2c527e32e81ca1/yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58", size = 339639 }, + { url = "https://files.pythonhosted.org/packages/48/2f/11566f1176a78f4bafb0937c0072410b1b0d3640b297944a6a7a556e1d0b/yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f", size = 353745 }, + { url = "https://files.pythonhosted.org/packages/26/17/07dfcf034d6ae8837b33988be66045dd52f878dfb1c4e8f80a7343f677be/yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae", size = 354178 }, + { url = "https://files.pythonhosted.org/packages/15/45/212604d3142d84b4065d5f8cab6582ed3d78e4cc250568ef2a36fe1cf0a5/yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018", size = 349219 }, + { url = "https://files.pythonhosted.org/packages/e6/e0/a10b30f294111c5f1c682461e9459935c17d467a760c21e1f7db400ff499/yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672", size = 337266 }, + { url = "https://files.pythonhosted.org/packages/33/a6/6efa1d85a675d25a46a167f9f3e80104cde317dfdf7f53f112ae6b16a60a/yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8", size = 360873 }, + { url = "https://files.pythonhosted.org/packages/77/67/c8ab718cb98dfa2ae9ba0f97bf3cbb7d45d37f13fe1fbad25ac92940954e/yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7", size = 360524 }, + { url = "https://files.pythonhosted.org/packages/bd/e8/c3f18660cea1bc73d9f8a2b3ef423def8dadbbae6c4afabdb920b73e0ead/yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594", size = 365370 }, + { url = "https://files.pythonhosted.org/packages/c9/99/33f3b97b065e62ff2d52817155a89cfa030a1a9b43fee7843ef560ad9603/yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6", size = 373297 }, + { url = "https://files.pythonhosted.org/packages/3d/89/7519e79e264a5f08653d2446b26d4724b01198a93a74d2e259291d538ab1/yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1", size = 378771 }, + { url = "https://files.pythonhosted.org/packages/3a/58/6c460bbb884abd2917c3eef6f663a4a873f8dc6f498561fc0ad92231c113/yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b", size = 375000 }, + { url = "https://files.pythonhosted.org/packages/3b/2a/dd7ed1aa23fea996834278d7ff178f215b24324ee527df53d45e34d21d28/yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64", size = 86355 }, + { url = "https://files.pythonhosted.org/packages/ca/c6/333fe0338305c0ac1c16d5aa7cc4841208d3252bbe62172e0051006b5445/yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c", size = 92904 }, + { url = 
"https://files.pythonhosted.org/packages/0f/6f/514c9bff2900c22a4f10e06297714dbaf98707143b37ff0bcba65a956221/yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f", size = 145030 }, + { url = "https://files.pythonhosted.org/packages/4e/9d/f88da3fa319b8c9c813389bfb3463e8d777c62654c7168e580a13fadff05/yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3", size = 96894 }, + { url = "https://files.pythonhosted.org/packages/cd/57/92e83538580a6968b2451d6c89c5579938a7309d4785748e8ad42ddafdce/yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d", size = 94457 }, + { url = "https://files.pythonhosted.org/packages/e9/ee/7ee43bd4cf82dddd5da97fcaddb6fa541ab81f3ed564c42f146c83ae17ce/yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0", size = 343070 }, + { url = "https://files.pythonhosted.org/packages/4a/12/b5eccd1109e2097bcc494ba7dc5de156e41cf8309fab437ebb7c2b296ce3/yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501", size = 337739 }, + { url = "https://files.pythonhosted.org/packages/7d/6b/0eade8e49af9fc2585552f63c76fa59ef469c724cc05b29519b19aa3a6d5/yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc", size = 351338 }, + { url = "https://files.pythonhosted.org/packages/45/cb/aaaa75d30087b5183c7b8a07b4fb16ae0682dd149a1719b3a28f54061754/yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d", size = 353636 }, + { url = 
"https://files.pythonhosted.org/packages/98/9d/d9cb39ec68a91ba6e66fa86d97003f58570327d6713833edf7ad6ce9dde5/yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0", size = 348061 }, + { url = "https://files.pythonhosted.org/packages/72/6b/103940aae893d0cc770b4c36ce80e2ed86fcb863d48ea80a752b8bda9303/yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a", size = 334150 }, + { url = "https://files.pythonhosted.org/packages/ef/b2/986bd82aa222c3e6b211a69c9081ba46484cffa9fab2a5235e8d18ca7a27/yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2", size = 362207 }, + { url = "https://files.pythonhosted.org/packages/14/7c/63f5922437b873795d9422cbe7eb2509d4b540c37ae5548a4bb68fd2c546/yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9", size = 361277 }, + { url = "https://files.pythonhosted.org/packages/81/83/450938cccf732466953406570bdb42c62b5ffb0ac7ac75a1f267773ab5c8/yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5", size = 364990 }, + { url = "https://files.pythonhosted.org/packages/b4/de/af47d3a47e4a833693b9ec8e87debb20f09d9fdc9139b207b09a3e6cbd5a/yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877", size = 374684 }, + { url = "https://files.pythonhosted.org/packages/62/0b/078bcc2d539f1faffdc7d32cb29a2d7caa65f1a6f7e40795d8485db21851/yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e", size = 382599 }, + { url = 
"https://files.pythonhosted.org/packages/74/a9/4fdb1a7899f1fb47fd1371e7ba9e94bff73439ce87099d5dd26d285fffe0/yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384", size = 378573 }, + { url = "https://files.pythonhosted.org/packages/fd/be/29f5156b7a319e4d2e5b51ce622b4dfb3aa8d8204cd2a8a339340fbfad40/yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62", size = 86051 }, + { url = "https://files.pythonhosted.org/packages/52/56/05fa52c32c301da77ec0b5f63d2d9605946fe29defacb2a7ebd473c23b81/yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c", size = 92742 }, + { url = "https://files.pythonhosted.org/packages/d4/2f/422546794196519152fc2e2f475f0e1d4d094a11995c81a465faf5673ffd/yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051", size = 163575 }, + { url = "https://files.pythonhosted.org/packages/90/fc/67c64ddab6c0b4a169d03c637fb2d2a212b536e1989dec8e7e2c92211b7f/yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d", size = 106121 }, + { url = "https://files.pythonhosted.org/packages/6d/00/29366b9eba7b6f6baed7d749f12add209b987c4cfbfa418404dbadc0f97c/yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229", size = 103815 }, + { url = "https://files.pythonhosted.org/packages/28/f4/a2a4c967c8323c03689383dff73396281ced3b35d0ed140580825c826af7/yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1", size = 408231 }, + { url = 
"https://files.pythonhosted.org/packages/0f/a1/66f7ffc0915877d726b70cc7a896ac30b6ac5d1d2760613603b022173635/yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb", size = 390221 }, + { url = "https://files.pythonhosted.org/packages/41/15/cc248f0504610283271615e85bf38bc014224122498c2016d13a3a1b8426/yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00", size = 411400 }, + { url = "https://files.pythonhosted.org/packages/5c/af/f0823d7e092bfb97d24fce6c7269d67fcd1aefade97d0a8189c4452e4d5e/yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de", size = 411714 }, + { url = "https://files.pythonhosted.org/packages/83/70/be418329eae64b9f1b20ecdaac75d53aef098797d4c2299d82ae6f8e4663/yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5", size = 404279 }, + { url = "https://files.pythonhosted.org/packages/19/f5/52e02f0075f65b4914eb890eea1ba97e6fd91dd821cc33a623aa707b2f67/yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a", size = 384044 }, + { url = "https://files.pythonhosted.org/packages/6a/36/b0fa25226b03d3f769c68d46170b3e92b00ab3853d73127273ba22474697/yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9", size = 416236 }, + { url = "https://files.pythonhosted.org/packages/cb/3a/54c828dd35f6831dfdd5a79e6c6b4302ae2c5feca24232a83cb75132b205/yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145", size = 402034 }, + { url = "https://files.pythonhosted.org/packages/10/97/c7bf5fba488f7e049f9ad69c1b8fdfe3daa2e8916b3d321aa049e361a55a/yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda", size = 407943 }, + { url = "https://files.pythonhosted.org/packages/fd/a4/022d2555c1e8fcff08ad7f0f43e4df3aba34f135bff04dd35d5526ce54ab/yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f", size = 423058 }, + { url = "https://files.pythonhosted.org/packages/4c/f6/0873a05563e5df29ccf35345a6ae0ac9e66588b41fdb7043a65848f03139/yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd", size = 423792 }, + { url = "https://files.pythonhosted.org/packages/9e/35/43fbbd082708fa42e923f314c24f8277a28483d219e049552e5007a9aaca/yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f", size = 422242 }, + { url = "https://files.pythonhosted.org/packages/ed/f7/f0f2500cf0c469beb2050b522c7815c575811627e6d3eb9ec7550ddd0bfe/yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac", size = 93816 }, + { url = "https://files.pythonhosted.org/packages/3f/93/f73b61353b2a699d489e782c3f5998b59f974ec3156a2050a52dfd7e8946/yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe", size = 101093 }, + { url = "https://files.pythonhosted.org/packages/ea/1f/70c57b3d7278e94ed22d85e09685d3f0a38ebdd8c5c73b65ba4c0d0fe002/yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124", size = 46124 }, +] + +[[package]] +name = "zipp" +version = "3.21.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, +]